uisikdag committed on
Commit 842d213 · verified · 1 Parent(s): c1240b0

Upload folder using huggingface_hub

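The commit message above says the folder was pushed with `huggingface_hub`. A minimal sketch of that kind of upload is shown below; the local folder path and target repo id are placeholders, not values taken from this commit.

```python
# Minimal sketch of pushing a training output folder with huggingface_hub.
# folder_path and repo_id are assumed placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="autotrain-modernBERT-large",  # hypothetical local output directory
    repo_id="uisikdag/your-model-repo",        # hypothetical target repository
    repo_type="model",
)
```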
README.md ADDED
@@ -0,0 +1,39 @@
+
+ ---
+ library_name: transformers
+ tags:
+ - autotrain
+ - text-classification
+ base_model: answerdotai/ModernBERT-large
+ widget:
+ - text: "I love AutoTrain"
+ datasets:
+ - uisikdag/42news
+ ---
+
+ # Model Trained Using AutoTrain
+
+ - Problem type: Text Classification
+
+ ## Validation Metrics
+ loss: 1.0703290700912476
+
+ f1_macro: 0.5315462561949883
+
+ f1_micro: 0.6112102820421278
+
+ f1_weighted: 0.6025346871719214
+
+ precision_macro: 0.6157278778731476
+
+ precision_micro: 0.6112102820421278
+
+ precision_weighted: 0.635344882882464
+
+ recall_macro: 0.5271202118810513
+
+ recall_micro: 0.6112102820421278
+
+ recall_weighted: 0.6112102820421278
+
+ accuracy: 0.6112102820421278
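The card above only lists validation metrics, so a hedged usage sketch may help: the snippet below loads the model through the `transformers` text-classification pipeline. The repo id and the sample sentence are assumptions; the config further down records `transformers_version` 4.48.0, so a comparably recent release is needed for the `modernbert` architecture.

```python
# Hedged inference sketch for this AutoTrain text-classification model.
# The repo id is a hypothetical placeholder for this repository.
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="uisikdag/autotrain-modernBERT-large",  # hypothetical repo id
)
# Example Turkish headline; labels come from the 13 classes in config.json.
print(classifier("Borsa güne yükselişle başladı."))
```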
checkpoint-8398/config.json ADDED
@@ -0,0 +1,79 @@
+ {
+ "_name_or_path": "answerdotai/ModernBERT-large",
+ "_num_labels": 13,
+ "architectures": [
+ "ModernBertForSequenceClassification"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 50281,
+ "classifier_activation": "gelu",
+ "classifier_bias": false,
+ "classifier_dropout": 0.0,
+ "classifier_pooling": "mean",
+ "cls_token_id": 50281,
+ "decoder_bias": true,
+ "deterministic_flash_attn": false,
+ "embedding_dropout": 0.0,
+ "eos_token_id": 50282,
+ "global_attn_every_n_layers": 3,
+ "global_rope_theta": 160000.0,
+ "gradient_checkpointing": false,
+ "hidden_activation": "gelu",
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "dunya",
+ "1": "ekonomi",
+ "2": "genel",
+ "3": "guncel",
+ "4": "kultur-sanat",
+ "5": "magazin",
+ "6": "planet",
+ "7": "saglik",
+ "8": "siyaset",
+ "9": "spor",
+ "10": "teknoloji",
+ "11": "turkiye",
+ "12": "yasam"
+ },
+ "initializer_cutoff_factor": 2.0,
+ "initializer_range": 0.02,
+ "intermediate_size": 2624,
+ "label2id": {
+ "dunya": 0,
+ "ekonomi": 1,
+ "genel": 2,
+ "guncel": 3,
+ "kultur-sanat": 4,
+ "magazin": 5,
+ "planet": 6,
+ "saglik": 7,
+ "siyaset": 8,
+ "spor": 9,
+ "teknoloji": 10,
+ "turkiye": 11,
+ "yasam": 12
+ },
+ "layer_norm_eps": 1e-05,
+ "local_attention": 128,
+ "local_rope_theta": 10000.0,
+ "max_position_embeddings": 8192,
+ "mlp_bias": false,
+ "mlp_dropout": 0.0,
+ "model_type": "modernbert",
+ "norm_bias": false,
+ "norm_eps": 1e-05,
+ "num_attention_heads": 16,
+ "num_hidden_layers": 28,
+ "pad_token_id": 50283,
+ "position_embedding_type": "absolute",
+ "problem_type": "single_label_classification",
+ "reference_compile": true,
+ "repad_logits_with_grad": false,
+ "sep_token_id": 50282,
+ "sparse_pred_ignore_index": -100,
+ "sparse_prediction": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.48.0",
+ "vocab_size": 50368
+ }
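The `id2label` / `label2id` tables above are what turn a predicted class index into one of the 13 Turkish news categories. A minimal decoding sketch, assuming the checkpoint directory has been downloaded locally and using the base model's tokenizer:

```python
# Sketch of decoding a prediction with the id2label mapping from this config.
# The local checkpoint path is an assumption; tokenizer files are loaded from
# the base model named in the config.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("answerdotai/ModernBERT-large")
model = AutoModelForSequenceClassification.from_pretrained("checkpoint-8398")  # assumed local path

inputs = tokenizer("Galatasaray derbiyi kazandı.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
pred_id = int(logits.argmax(dim=-1))
print(model.config.id2label[pred_id])  # one of the 13 labels, e.g. "spor"
```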
checkpoint-8398/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b75e3dd58b60ccea590aaa484c65e1584092399c75cb03d73d08cc771b5b9a22
+ size 1583396740
checkpoint-8398/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6df8d41534a9aee8cdb9a1d9c24ff3a8a8fc3bccabce88b77a0a23c1fdf9b975
+ size 3166903290
checkpoint-8398/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f37c40ce327861a7ca13b719d3aa37510a143368b6e74358bdb14becb3899e1e
+ size 14244
checkpoint-8398/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89d9ae34a0bc30130c48a9abe4b837d614c8c8d2abda68571fb298759f6810c4
+ size 1064
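The four checkpoint files above are Git LFS pointers (spec version, sha256 oid, size in bytes) rather than the binaries themselves. A hedged sketch of pulling one of the LFS-backed files from the Hub follows; the repo id is a placeholder.

```python
# Sketch of fetching an LFS-backed file from the Hub by its path in the repo.
# The repo id is a hypothetical placeholder for this repository.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="uisikdag/autotrain-modernBERT-large",  # hypothetical repo id
    filename="checkpoint-8398/model.safetensors",   # ~1.58 GB per the pointer above
)
print(local_path)
```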
checkpoint-8398/trainer_state.json ADDED
@@ -0,0 +1,2423 @@
+ {
+ "best_metric": 1.0703290700912476,
+ "best_model_checkpoint": "autotrain-modernBERT-large/checkpoint-8398",
+ "epoch": 2.0,
+ "eval_steps": 500,
+ "global_step": 8398,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ (training entries logged every 25 steps with epoch, grad_norm, learning_rate, loss, and step, running from step 25 to where the displayed diff cuts off at step 6975; the epoch-1.0 evaluation entry is reproduced below)
+ {
+ "epoch": 1.0,
+ "eval_accuracy": 0.5966916577412829,
+ "eval_f1_macro": 0.4947213544811146,
+ "eval_f1_micro": 0.5966916577412829,
+ "eval_f1_weighted": 0.5697156949348047,
+ "eval_loss": 1.2080416679382324,
+ "eval_precision_macro": 0.4954154541683826,
+ "eval_precision_micro": 0.5966916577412829,
+ "eval_precision_weighted": 0.5767902087270278,
+ "eval_recall_macro": 0.5315721027405438,
+ "eval_recall_micro": 0.5966916577412829,
+ "eval_recall_weighted": 0.5966916577412829,
+ "eval_runtime": 26.7531,
+ "eval_samples_per_second": 314.095,
+ "eval_steps_per_second": 19.661,
+ "step": 4199
+ },
+ "epoch": 1.6670635865682306,
1984
+ "grad_norm": 6.016676425933838,
1985
+ "learning_rate": 4.6303352650101876e-05,
1986
+ "loss": 1.1006,
1987
+ "step": 7000
1988
+ },
1989
+ {
1990
+ "epoch": 1.6730173850916885,
1991
+ "grad_norm": 5.641656875610352,
1992
+ "learning_rate": 4.6270275991638224e-05,
1993
+ "loss": 0.9667,
1994
+ "step": 7025
1995
+ },
1996
+ {
1997
+ "epoch": 1.6789711836151464,
1998
+ "grad_norm": 6.386135101318359,
1999
+ "learning_rate": 4.623719933317457e-05,
2000
+ "loss": 1.0356,
2001
+ "step": 7050
2002
+ },
2003
+ {
2004
+ "epoch": 1.6849249821386043,
2005
+ "grad_norm": 6.999917507171631,
2006
+ "learning_rate": 4.620412267471091e-05,
2007
+ "loss": 1.0388,
2008
+ "step": 7075
2009
+ },
2010
+ {
2011
+ "epoch": 1.6908787806620624,
2012
+ "grad_norm": 6.0239129066467285,
2013
+ "learning_rate": 4.617104601624725e-05,
2014
+ "loss": 0.8963,
2015
+ "step": 7100
2016
+ },
2017
+ {
2018
+ "epoch": 1.6968325791855203,
2019
+ "grad_norm": 7.390036106109619,
2020
+ "learning_rate": 4.61379693577836e-05,
2021
+ "loss": 1.1959,
2022
+ "step": 7125
2023
+ },
2024
+ {
2025
+ "epoch": 1.7027863777089784,
2026
+ "grad_norm": 5.598720550537109,
2027
+ "learning_rate": 4.610489269931995e-05,
2028
+ "loss": 1.0214,
2029
+ "step": 7150
2030
+ },
2031
+ {
2032
+ "epoch": 1.7087401762324363,
2033
+ "grad_norm": 4.715810298919678,
2034
+ "learning_rate": 4.6071816040856294e-05,
2035
+ "loss": 0.8499,
2036
+ "step": 7175
2037
+ },
2038
+ {
2039
+ "epoch": 1.7146939747558942,
2040
+ "grad_norm": 4.418345928192139,
2041
+ "learning_rate": 4.6038739382392635e-05,
2042
+ "loss": 0.9179,
2043
+ "step": 7200
2044
+ },
2045
+ {
2046
+ "epoch": 1.7206477732793521,
2047
+ "grad_norm": 4.199166774749756,
2048
+ "learning_rate": 4.6005662723928975e-05,
2049
+ "loss": 0.9991,
2050
+ "step": 7225
2051
+ },
2052
+ {
2053
+ "epoch": 1.72660157180281,
2054
+ "grad_norm": 6.189085960388184,
2055
+ "learning_rate": 4.597258606546532e-05,
2056
+ "loss": 1.0998,
2057
+ "step": 7250
2058
+ },
2059
+ {
2060
+ "epoch": 1.7325553703262682,
2061
+ "grad_norm": 6.4427809715271,
2062
+ "learning_rate": 4.593950940700167e-05,
2063
+ "loss": 0.8524,
2064
+ "step": 7275
2065
+ },
2066
+ {
2067
+ "epoch": 1.7385091688497263,
2068
+ "grad_norm": 3.7036330699920654,
2069
+ "learning_rate": 4.590643274853802e-05,
2070
+ "loss": 0.9207,
2071
+ "step": 7300
2072
+ },
2073
+ {
2074
+ "epoch": 1.7444629673731842,
2075
+ "grad_norm": 10.30667495727539,
2076
+ "learning_rate": 4.587335609007436e-05,
2077
+ "loss": 1.0137,
2078
+ "step": 7325
2079
+ },
2080
+ {
2081
+ "epoch": 1.750416765896642,
2082
+ "grad_norm": 5.976552486419678,
2083
+ "learning_rate": 4.5840279431610705e-05,
2084
+ "loss": 1.0607,
2085
+ "step": 7350
2086
+ },
2087
+ {
2088
+ "epoch": 1.7563705644201,
2089
+ "grad_norm": 5.703713893890381,
2090
+ "learning_rate": 4.5807202773147046e-05,
2091
+ "loss": 0.9637,
2092
+ "step": 7375
2093
+ },
2094
+ {
2095
+ "epoch": 1.7623243629435579,
2096
+ "grad_norm": 4.452537536621094,
2097
+ "learning_rate": 4.577412611468339e-05,
2098
+ "loss": 0.969,
2099
+ "step": 7400
2100
+ },
2101
+ {
2102
+ "epoch": 1.768278161467016,
2103
+ "grad_norm": 3.4350435733795166,
2104
+ "learning_rate": 4.574104945621973e-05,
2105
+ "loss": 1.0022,
2106
+ "step": 7425
2107
+ },
2108
+ {
2109
+ "epoch": 1.774231959990474,
2110
+ "grad_norm": 4.28000545501709,
2111
+ "learning_rate": 4.570797279775608e-05,
2112
+ "loss": 1.0655,
2113
+ "step": 7450
2114
+ },
2115
+ {
2116
+ "epoch": 1.780185758513932,
2117
+ "grad_norm": 5.708723545074463,
2118
+ "learning_rate": 4.567489613929243e-05,
2119
+ "loss": 0.9273,
2120
+ "step": 7475
2121
+ },
2122
+ {
2123
+ "epoch": 1.78613955703739,
2124
+ "grad_norm": 6.020782470703125,
2125
+ "learning_rate": 4.5641819480828775e-05,
2126
+ "loss": 0.945,
2127
+ "step": 7500
2128
+ },
2129
+ {
2130
+ "epoch": 1.7920933555608478,
2131
+ "grad_norm": 4.774454593658447,
2132
+ "learning_rate": 4.5608742822365116e-05,
2133
+ "loss": 1.017,
2134
+ "step": 7525
2135
+ },
2136
+ {
2137
+ "epoch": 1.7980471540843057,
2138
+ "grad_norm": 5.020348072052002,
2139
+ "learning_rate": 4.5575666163901456e-05,
2140
+ "loss": 1.0637,
2141
+ "step": 7550
2142
+ },
2143
+ {
2144
+ "epoch": 1.8040009526077636,
2145
+ "grad_norm": 7.373719692230225,
2146
+ "learning_rate": 4.5542589505437804e-05,
2147
+ "loss": 1.0,
2148
+ "step": 7575
2149
+ },
2150
+ {
2151
+ "epoch": 1.8099547511312217,
2152
+ "grad_norm": 3.8169615268707275,
2153
+ "learning_rate": 4.550951284697415e-05,
2154
+ "loss": 0.9843,
2155
+ "step": 7600
2156
+ },
2157
+ {
2158
+ "epoch": 1.8159085496546798,
2159
+ "grad_norm": 6.140167236328125,
2160
+ "learning_rate": 4.54764361885105e-05,
2161
+ "loss": 0.9415,
2162
+ "step": 7625
2163
+ },
2164
+ {
2165
+ "epoch": 1.8218623481781377,
2166
+ "grad_norm": 9.949686050415039,
2167
+ "learning_rate": 4.544335953004684e-05,
2168
+ "loss": 1.0756,
2169
+ "step": 7650
2170
+ },
2171
+ {
2172
+ "epoch": 1.8278161467015956,
2173
+ "grad_norm": 5.582162380218506,
2174
+ "learning_rate": 4.541028287158318e-05,
2175
+ "loss": 0.9648,
2176
+ "step": 7675
2177
+ },
2178
+ {
2179
+ "epoch": 1.8337699452250535,
2180
+ "grad_norm": 7.782459259033203,
2181
+ "learning_rate": 4.537720621311953e-05,
2182
+ "loss": 1.0866,
2183
+ "step": 7700
2184
+ },
2185
+ {
2186
+ "epoch": 1.8397237437485114,
2187
+ "grad_norm": 4.777675151824951,
2188
+ "learning_rate": 4.5344129554655874e-05,
2189
+ "loss": 0.8936,
2190
+ "step": 7725
2191
+ },
2192
+ {
2193
+ "epoch": 1.8456775422719696,
2194
+ "grad_norm": 4.16018009185791,
2195
+ "learning_rate": 4.531105289619222e-05,
2196
+ "loss": 0.9454,
2197
+ "step": 7750
2198
+ },
2199
+ {
2200
+ "epoch": 1.8516313407954275,
2201
+ "grad_norm": 4.443974494934082,
2202
+ "learning_rate": 4.527797623772856e-05,
2203
+ "loss": 1.0104,
2204
+ "step": 7775
2205
+ },
2206
+ {
2207
+ "epoch": 1.8575851393188856,
2208
+ "grad_norm": 6.139829158782959,
2209
+ "learning_rate": 4.52448995792649e-05,
2210
+ "loss": 0.9752,
2211
+ "step": 7800
2212
+ },
2213
+ {
2214
+ "epoch": 1.8635389378423435,
2215
+ "grad_norm": 8.074860572814941,
2216
+ "learning_rate": 4.521182292080125e-05,
2217
+ "loss": 1.0173,
2218
+ "step": 7825
2219
+ },
2220
+ {
2221
+ "epoch": 1.8694927363658014,
2222
+ "grad_norm": 5.377435684204102,
2223
+ "learning_rate": 4.51787462623376e-05,
2224
+ "loss": 0.8224,
2225
+ "step": 7850
2226
+ },
2227
+ {
2228
+ "epoch": 1.8754465348892593,
2229
+ "grad_norm": 8.767934799194336,
2230
+ "learning_rate": 4.5145669603873944e-05,
2231
+ "loss": 1.1385,
2232
+ "step": 7875
2233
+ },
2234
+ {
2235
+ "epoch": 1.8814003334127172,
2236
+ "grad_norm": 7.324585914611816,
2237
+ "learning_rate": 4.5112592945410285e-05,
2238
+ "loss": 1.0644,
2239
+ "step": 7900
2240
+ },
2241
+ {
2242
+ "epoch": 1.8873541319361753,
2243
+ "grad_norm": 9.182265281677246,
2244
+ "learning_rate": 4.5079516286946626e-05,
2245
+ "loss": 1.102,
2246
+ "step": 7925
2247
+ },
2248
+ {
2249
+ "epoch": 1.8933079304596332,
2250
+ "grad_norm": 7.2876715660095215,
2251
+ "learning_rate": 4.504643962848297e-05,
2252
+ "loss": 0.9735,
2253
+ "step": 7950
2254
+ },
2255
+ {
2256
+ "epoch": 1.8992617289830913,
2257
+ "grad_norm": 6.998351097106934,
2258
+ "learning_rate": 4.501336297001932e-05,
2259
+ "loss": 0.956,
2260
+ "step": 7975
2261
+ },
2262
+ {
2263
+ "epoch": 1.9052155275065492,
2264
+ "grad_norm": 4.216850280761719,
2265
+ "learning_rate": 4.498028631155566e-05,
2266
+ "loss": 0.9517,
2267
+ "step": 8000
2268
+ },
2269
+ {
2270
+ "epoch": 1.911169326030007,
2271
+ "grad_norm": 3.5328800678253174,
2272
+ "learning_rate": 4.494720965309201e-05,
2273
+ "loss": 1.0805,
2274
+ "step": 8025
2275
+ },
2276
+ {
2277
+ "epoch": 1.917123124553465,
2278
+ "grad_norm": 6.827732086181641,
2279
+ "learning_rate": 4.4914132994628355e-05,
2280
+ "loss": 1.0643,
2281
+ "step": 8050
2282
+ },
2283
+ {
2284
+ "epoch": 1.9230769230769231,
2285
+ "grad_norm": 6.08967399597168,
2286
+ "learning_rate": 4.4881056336164696e-05,
2287
+ "loss": 1.0416,
2288
+ "step": 8075
2289
+ },
2290
+ {
2291
+ "epoch": 1.929030721600381,
2292
+ "grad_norm": 8.008926391601562,
2293
+ "learning_rate": 4.484797967770104e-05,
2294
+ "loss": 0.9322,
2295
+ "step": 8100
2296
+ },
2297
+ {
2298
+ "epoch": 1.9349845201238391,
2299
+ "grad_norm": 13.753870964050293,
2300
+ "learning_rate": 4.4814903019237384e-05,
2301
+ "loss": 0.8809,
2302
+ "step": 8125
2303
+ },
2304
+ {
2305
+ "epoch": 1.940938318647297,
2306
+ "grad_norm": 9.109087944030762,
2307
+ "learning_rate": 4.478182636077373e-05,
2308
+ "loss": 0.9042,
2309
+ "step": 8150
2310
+ },
2311
+ {
2312
+ "epoch": 1.946892117170755,
2313
+ "grad_norm": 6.401242256164551,
2314
+ "learning_rate": 4.474874970231008e-05,
2315
+ "loss": 0.9542,
2316
+ "step": 8175
2317
+ },
2318
+ {
2319
+ "epoch": 1.9528459156942128,
2320
+ "grad_norm": 10.232426643371582,
2321
+ "learning_rate": 4.4715673043846426e-05,
2322
+ "loss": 1.1949,
2323
+ "step": 8200
2324
+ },
2325
+ {
2326
+ "epoch": 1.9587997142176707,
2327
+ "grad_norm": 5.458783149719238,
2328
+ "learning_rate": 4.4682596385382766e-05,
2329
+ "loss": 0.8728,
2330
+ "step": 8225
2331
+ },
2332
+ {
2333
+ "epoch": 1.9647535127411289,
2334
+ "grad_norm": 8.005071640014648,
2335
+ "learning_rate": 4.464951972691911e-05,
2336
+ "loss": 1.0506,
2337
+ "step": 8250
2338
+ },
2339
+ {
2340
+ "epoch": 1.9707073112645868,
2341
+ "grad_norm": 8.608522415161133,
2342
+ "learning_rate": 4.4616443068455454e-05,
2343
+ "loss": 1.0458,
2344
+ "step": 8275
2345
+ },
2346
+ {
2347
+ "epoch": 1.9766611097880449,
2348
+ "grad_norm": 7.659764289855957,
2349
+ "learning_rate": 4.45833664099918e-05,
2350
+ "loss": 0.9629,
2351
+ "step": 8300
2352
+ },
2353
+ {
2354
+ "epoch": 1.9826149083115028,
2355
+ "grad_norm": 6.268425464630127,
2356
+ "learning_rate": 4.455028975152815e-05,
2357
+ "loss": 0.9374,
2358
+ "step": 8325
2359
+ },
2360
+ {
2361
+ "epoch": 1.9885687068349607,
2362
+ "grad_norm": 6.64276123046875,
2363
+ "learning_rate": 4.451721309306449e-05,
2364
+ "loss": 0.9243,
2365
+ "step": 8350
2366
+ },
2367
+ {
2368
+ "epoch": 1.9945225053584186,
2369
+ "grad_norm": 7.175469398498535,
2370
+ "learning_rate": 4.448413643460083e-05,
2371
+ "loss": 1.1004,
2372
+ "step": 8375
2373
+ },
2374
+ {
2375
+ "epoch": 2.0,
2376
+ "eval_accuracy": 0.6112102820421278,
2377
+ "eval_f1_macro": 0.5315462561949883,
2378
+ "eval_f1_micro": 0.6112102820421278,
2379
+ "eval_f1_weighted": 0.6025346871719214,
2380
+ "eval_loss": 1.0703290700912476,
2381
+ "eval_precision_macro": 0.6157278778731476,
2382
+ "eval_precision_micro": 0.6112102820421278,
2383
+ "eval_precision_weighted": 0.635344882882464,
2384
+ "eval_recall_macro": 0.5271202118810513,
2385
+ "eval_recall_micro": 0.6112102820421278,
2386
+ "eval_recall_weighted": 0.6112102820421278,
2387
+ "eval_runtime": 25.2331,
2388
+ "eval_samples_per_second": 333.014,
2389
+ "eval_steps_per_second": 20.846,
2390
+ "step": 8398
2391
+ }
2392
+ ],
2393
+ "logging_steps": 25,
2394
+ "max_steps": 41990,
2395
+ "num_input_tokens_seen": 0,
2396
+ "num_train_epochs": 10,
2397
+ "save_steps": 500,
2398
+ "stateful_callbacks": {
2399
+ "EarlyStoppingCallback": {
2400
+ "args": {
2401
+ "early_stopping_patience": 5,
2402
+ "early_stopping_threshold": 0.01
2403
+ },
2404
+ "attributes": {
2405
+ "early_stopping_patience_counter": 0
2406
+ }
2407
+ },
2408
+ "TrainerControl": {
2409
+ "args": {
2410
+ "should_epoch_stop": false,
2411
+ "should_evaluate": false,
2412
+ "should_log": false,
2413
+ "should_save": true,
2414
+ "should_training_stop": false
2415
+ },
2416
+ "attributes": {}
2417
+ }
2418
+ },
2419
+ "total_flos": 1.7761706134611456e+16,
2420
+ "train_batch_size": 8,
2421
+ "trial_name": null,
2422
+ "trial_params": null
2423
+ }
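The JSON above is the tail of the checkpoint's Trainer state (the standard trainer_state.json layout): logging entries every 25 steps with epoch, grad_norm, learning_rate and loss, a single eval_* entry at epoch 2.0, and the stored EarlyStoppingCallback/TrainerControl state. A minimal sketch for inspecting such a file offline, assuming it has been downloaded locally as trainer_state.json:

```python
import json

# Minimal sketch: inspect a locally downloaded copy of the trainer state.
# The local filename "trainer_state.json" is an assumption; adjust as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Entries logged every 25 steps carry loss/learning_rate/grad_norm;
# entries written at evaluation time carry eval_* metrics instead.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"{len(train_logs)} training points, {len(eval_logs)} eval points, "
      f"max_steps={state['max_steps']}")
for e in eval_logs:
    print(f"epoch {e['epoch']:.1f}: eval_loss={e['eval_loss']:.4f}, "
          f"accuracy={e['eval_accuracy']:.4f}, f1_macro={e['eval_f1_macro']:.4f}")
```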
checkpoint-8398/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81528ef86348f13753ac9c95b4717d84c6154cea233a98a91f71c7af256567a1
3
+ size 5368
config.json ADDED
@@ -0,0 +1,79 @@
1
+ {
2
+ "_name_or_path": "answerdotai/ModernBERT-large",
3
+ "_num_labels": 13,
4
+ "architectures": [
5
+ "ModernBertForSequenceClassification"
6
+ ],
7
+ "attention_bias": false,
8
+ "attention_dropout": 0.0,
9
+ "bos_token_id": 50281,
10
+ "classifier_activation": "gelu",
11
+ "classifier_bias": false,
12
+ "classifier_dropout": 0.0,
13
+ "classifier_pooling": "mean",
14
+ "cls_token_id": 50281,
15
+ "decoder_bias": true,
16
+ "deterministic_flash_attn": false,
17
+ "embedding_dropout": 0.0,
18
+ "eos_token_id": 50282,
19
+ "global_attn_every_n_layers": 3,
20
+ "global_rope_theta": 160000.0,
21
+ "gradient_checkpointing": false,
22
+ "hidden_activation": "gelu",
23
+ "hidden_size": 1024,
24
+ "id2label": {
25
+ "0": "dunya",
26
+ "1": "ekonomi",
27
+ "2": "genel",
28
+ "3": "guncel",
29
+ "4": "kultur-sanat",
30
+ "5": "magazin",
31
+ "6": "planet",
32
+ "7": "saglik",
33
+ "8": "siyaset",
34
+ "9": "spor",
35
+ "10": "teknoloji",
36
+ "11": "turkiye",
37
+ "12": "yasam"
38
+ },
39
+ "initializer_cutoff_factor": 2.0,
40
+ "initializer_range": 0.02,
41
+ "intermediate_size": 2624,
42
+ "label2id": {
43
+ "dunya": 0,
44
+ "ekonomi": 1,
45
+ "genel": 2,
46
+ "guncel": 3,
47
+ "kultur-sanat": 4,
48
+ "magazin": 5,
49
+ "planet": 6,
50
+ "saglik": 7,
51
+ "siyaset": 8,
52
+ "spor": 9,
53
+ "teknoloji": 10,
54
+ "turkiye": 11,
55
+ "yasam": 12
56
+ },
57
+ "layer_norm_eps": 1e-05,
58
+ "local_attention": 128,
59
+ "local_rope_theta": 10000.0,
60
+ "max_position_embeddings": 8192,
61
+ "mlp_bias": false,
62
+ "mlp_dropout": 0.0,
63
+ "model_type": "modernbert",
64
+ "norm_bias": false,
65
+ "norm_eps": 1e-05,
66
+ "num_attention_heads": 16,
67
+ "num_hidden_layers": 28,
68
+ "pad_token_id": 50283,
69
+ "position_embedding_type": "absolute",
70
+ "problem_type": "single_label_classification",
71
+ "reference_compile": true,
72
+ "repad_logits_with_grad": false,
73
+ "sep_token_id": 50282,
74
+ "sparse_pred_ignore_index": -100,
75
+ "sparse_prediction": false,
76
+ "torch_dtype": "float32",
77
+ "transformers_version": "4.48.0",
78
+ "vocab_size": 50368
79
+ }
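config.json describes a ModernBertForSequenceClassification head over ModernBERT-large (hidden size 1024, 28 layers) with 13 Turkish news categories in id2label. A minimal inference sketch, assuming the uploaded files have been downloaded into a local directory (the path and example headline below are placeholders) and that the installed transformers is at least the recorded 4.48.0:

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Placeholder path: point this at a local copy of the uploaded files.
model_dir = "./autotrain-modernBERT-large"

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
model.eval()

# Illustrative Turkish headline (not taken from the dataset).
text = "Merkez Bankası faiz kararını açıkladı"
# max_length=128 matches the max_seq_length used during training.
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=128)

with torch.no_grad():
    logits = model(**inputs).logits

probs = torch.softmax(logits, dim=-1)[0]
pred = int(probs.argmax())
# id2label maps class indices to names such as "ekonomi", "spor" or "teknoloji".
print(model.config.id2label[pred], float(probs[pred]))
```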
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b75e3dd58b60ccea590aaa484c65e1584092399c75cb03d73d08cc771b5b9a22
3
+ size 1583396740
runs/Mar08_21-48-47_umit-MS-7E07/events.out.tfevents.1741459728.umit-MS-7E07.5173.0 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0265d28f0379f6b61421f0e83f502ddeaf9598e7472cf8f9fdaf2e1b013598da
3
- size 220467
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0b60c077d2c5ab0a7211c386d0f2f00aeebbd480956e4c64046478d30b81de54
3
+ size 262513
runs/Mar08_21-48-47_umit-MS-7E07/events.out.tfevents.1741463362.umit-MS-7E07.5173.1 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c19606b6f7caef3e3123097a93456174e54f78c184fca57fb1d81c1028e886f
3
+ size 936
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "cls_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "mask_token": {
10
+ "content": "[MASK]",
11
+ "lstrip": true,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "sep_token": {
24
+ "content": "[SEP]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "[UNK]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ }
37
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,945 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "|||IP_ADDRESS|||",
5
+ "lstrip": false,
6
+ "normalized": true,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "1": {
12
+ "content": "<|padding|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "50254": {
20
+ "content": " ",
21
+ "lstrip": false,
22
+ "normalized": true,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": false
26
+ },
27
+ "50255": {
28
+ "content": " ",
29
+ "lstrip": false,
30
+ "normalized": true,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": false
34
+ },
35
+ "50256": {
36
+ "content": " ",
37
+ "lstrip": false,
38
+ "normalized": true,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": false
42
+ },
43
+ "50257": {
44
+ "content": " ",
45
+ "lstrip": false,
46
+ "normalized": true,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": false
50
+ },
51
+ "50258": {
52
+ "content": " ",
53
+ "lstrip": false,
54
+ "normalized": true,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": false
58
+ },
59
+ "50259": {
60
+ "content": " ",
61
+ "lstrip": false,
62
+ "normalized": true,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": false
66
+ },
67
+ "50260": {
68
+ "content": " ",
69
+ "lstrip": false,
70
+ "normalized": true,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": false
74
+ },
75
+ "50261": {
76
+ "content": " ",
77
+ "lstrip": false,
78
+ "normalized": true,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": false
82
+ },
83
+ "50262": {
84
+ "content": " ",
85
+ "lstrip": false,
86
+ "normalized": true,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": false
90
+ },
91
+ "50263": {
92
+ "content": " ",
93
+ "lstrip": false,
94
+ "normalized": true,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": false
98
+ },
99
+ "50264": {
100
+ "content": " ",
101
+ "lstrip": false,
102
+ "normalized": true,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": false
106
+ },
107
+ "50265": {
108
+ "content": " ",
109
+ "lstrip": false,
110
+ "normalized": true,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": false
114
+ },
115
+ "50266": {
116
+ "content": " ",
117
+ "lstrip": false,
118
+ "normalized": true,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": false
122
+ },
123
+ "50267": {
124
+ "content": " ",
125
+ "lstrip": false,
126
+ "normalized": true,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": false
130
+ },
131
+ "50268": {
132
+ "content": " ",
133
+ "lstrip": false,
134
+ "normalized": true,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": false
138
+ },
139
+ "50269": {
140
+ "content": " ",
141
+ "lstrip": false,
142
+ "normalized": true,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": false
146
+ },
147
+ "50270": {
148
+ "content": " ",
149
+ "lstrip": false,
150
+ "normalized": true,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": false
154
+ },
155
+ "50271": {
156
+ "content": " ",
157
+ "lstrip": false,
158
+ "normalized": true,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": false
162
+ },
163
+ "50272": {
164
+ "content": " ",
165
+ "lstrip": false,
166
+ "normalized": true,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": false
170
+ },
171
+ "50273": {
172
+ "content": " ",
173
+ "lstrip": false,
174
+ "normalized": true,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": false
178
+ },
179
+ "50274": {
180
+ "content": " ",
181
+ "lstrip": false,
182
+ "normalized": true,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": false
186
+ },
187
+ "50275": {
188
+ "content": " ",
189
+ "lstrip": false,
190
+ "normalized": true,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": false
194
+ },
195
+ "50276": {
196
+ "content": " ",
197
+ "lstrip": false,
198
+ "normalized": true,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": false
202
+ },
203
+ "50277": {
204
+ "content": "|||EMAIL_ADDRESS|||",
205
+ "lstrip": false,
206
+ "normalized": true,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": false
210
+ },
211
+ "50278": {
212
+ "content": "|||PHONE_NUMBER|||",
213
+ "lstrip": false,
214
+ "normalized": true,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": false
218
+ },
219
+ "50279": {
220
+ "content": "<|endoftext|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "50280": {
228
+ "content": "[UNK]",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "50281": {
236
+ "content": "[CLS]",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "50282": {
244
+ "content": "[SEP]",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "50283": {
252
+ "content": "[PAD]",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "50284": {
260
+ "content": "[MASK]",
261
+ "lstrip": true,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "50285": {
268
+ "content": "[unused0]",
269
+ "lstrip": false,
270
+ "normalized": true,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": false
274
+ },
275
+ "50286": {
276
+ "content": "[unused1]",
277
+ "lstrip": false,
278
+ "normalized": true,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": false
282
+ },
283
+ "50287": {
284
+ "content": "[unused2]",
285
+ "lstrip": false,
286
+ "normalized": true,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": false
290
+ },
291
+ "50288": {
292
+ "content": "[unused3]",
293
+ "lstrip": false,
294
+ "normalized": true,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": false
298
+ },
299
+ "50289": {
300
+ "content": "[unused4]",
301
+ "lstrip": false,
302
+ "normalized": true,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": false
306
+ },
307
+ "50290": {
308
+ "content": "[unused5]",
309
+ "lstrip": false,
310
+ "normalized": true,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": false
314
+ },
315
+ "50291": {
316
+ "content": "[unused6]",
317
+ "lstrip": false,
318
+ "normalized": true,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": false
322
+ },
323
+ "50292": {
324
+ "content": "[unused7]",
325
+ "lstrip": false,
326
+ "normalized": true,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": false
330
+ },
331
+ "50293": {
332
+ "content": "[unused8]",
333
+ "lstrip": false,
334
+ "normalized": true,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": false
338
+ },
339
+ "50294": {
340
+ "content": "[unused9]",
341
+ "lstrip": false,
342
+ "normalized": true,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": false
346
+ },
347
+ "50295": {
348
+ "content": "[unused10]",
349
+ "lstrip": false,
350
+ "normalized": true,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": false
354
+ },
355
+ "50296": {
356
+ "content": "[unused11]",
357
+ "lstrip": false,
358
+ "normalized": true,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": false
362
+ },
363
+ "50297": {
364
+ "content": "[unused12]",
365
+ "lstrip": false,
366
+ "normalized": true,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": false
370
+ },
371
+ "50298": {
372
+ "content": "[unused13]",
373
+ "lstrip": false,
374
+ "normalized": true,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": false
378
+ },
379
+ "50299": {
380
+ "content": "[unused14]",
381
+ "lstrip": false,
382
+ "normalized": true,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": false
386
+ },
387
+ "50300": {
388
+ "content": "[unused15]",
389
+ "lstrip": false,
390
+ "normalized": true,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": false
394
+ },
395
+ "50301": {
396
+ "content": "[unused16]",
397
+ "lstrip": false,
398
+ "normalized": true,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": false
402
+ },
403
+ "50302": {
404
+ "content": "[unused17]",
405
+ "lstrip": false,
406
+ "normalized": true,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": false
410
+ },
411
+ "50303": {
412
+ "content": "[unused18]",
413
+ "lstrip": false,
414
+ "normalized": true,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": false
418
+ },
419
+ "50304": {
420
+ "content": "[unused19]",
421
+ "lstrip": false,
422
+ "normalized": true,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": false
426
+ },
427
+ "50305": {
428
+ "content": "[unused20]",
429
+ "lstrip": false,
430
+ "normalized": true,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": false
434
+ },
435
+ "50306": {
436
+ "content": "[unused21]",
437
+ "lstrip": false,
438
+ "normalized": true,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": false
442
+ },
443
+ "50307": {
444
+ "content": "[unused22]",
445
+ "lstrip": false,
446
+ "normalized": true,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": false
450
+ },
451
+ "50308": {
452
+ "content": "[unused23]",
453
+ "lstrip": false,
454
+ "normalized": true,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": false
458
+ },
459
+ "50309": {
460
+ "content": "[unused24]",
461
+ "lstrip": false,
462
+ "normalized": true,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": false
466
+ },
467
+ "50310": {
468
+ "content": "[unused25]",
469
+ "lstrip": false,
470
+ "normalized": true,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": false
474
+ },
475
+ "50311": {
476
+ "content": "[unused26]",
477
+ "lstrip": false,
478
+ "normalized": true,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": false
482
+ },
483
+ "50312": {
484
+ "content": "[unused27]",
485
+ "lstrip": false,
486
+ "normalized": true,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": false
490
+ },
491
+ "50313": {
492
+ "content": "[unused28]",
493
+ "lstrip": false,
494
+ "normalized": true,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": false
498
+ },
499
+ "50314": {
500
+ "content": "[unused29]",
501
+ "lstrip": false,
502
+ "normalized": true,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": false
506
+ },
507
+ "50315": {
508
+ "content": "[unused30]",
509
+ "lstrip": false,
510
+ "normalized": true,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": false
514
+ },
515
+ "50316": {
516
+ "content": "[unused31]",
517
+ "lstrip": false,
518
+ "normalized": true,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": false
522
+ },
523
+ "50317": {
524
+ "content": "[unused32]",
525
+ "lstrip": false,
526
+ "normalized": true,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": false
530
+ },
531
+ "50318": {
532
+ "content": "[unused33]",
533
+ "lstrip": false,
534
+ "normalized": true,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": false
538
+ },
539
+ "50319": {
540
+ "content": "[unused34]",
541
+ "lstrip": false,
542
+ "normalized": true,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": false
546
+ },
547
+ "50320": {
548
+ "content": "[unused35]",
549
+ "lstrip": false,
550
+ "normalized": true,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": false
554
+ },
555
+ "50321": {
556
+ "content": "[unused36]",
557
+ "lstrip": false,
558
+ "normalized": true,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": false
562
+ },
563
+ "50322": {
564
+ "content": "[unused37]",
565
+ "lstrip": false,
566
+ "normalized": true,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": false
570
+ },
571
+ "50323": {
572
+ "content": "[unused38]",
573
+ "lstrip": false,
574
+ "normalized": true,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": false
578
+ },
579
+ "50324": {
580
+ "content": "[unused39]",
581
+ "lstrip": false,
582
+ "normalized": true,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": false
586
+ },
587
+ "50325": {
588
+ "content": "[unused40]",
589
+ "lstrip": false,
590
+ "normalized": true,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": false
594
+ },
595
+ "50326": {
596
+ "content": "[unused41]",
597
+ "lstrip": false,
598
+ "normalized": true,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": false
602
+ },
603
+ "50327": {
604
+ "content": "[unused42]",
605
+ "lstrip": false,
606
+ "normalized": true,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": false
610
+ },
611
+ "50328": {
612
+ "content": "[unused43]",
613
+ "lstrip": false,
614
+ "normalized": true,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": false
618
+ },
619
+ "50329": {
620
+ "content": "[unused44]",
621
+ "lstrip": false,
622
+ "normalized": true,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": false
626
+ },
627
+ "50330": {
628
+ "content": "[unused45]",
629
+ "lstrip": false,
630
+ "normalized": true,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": false
634
+ },
635
+ "50331": {
636
+ "content": "[unused46]",
637
+ "lstrip": false,
638
+ "normalized": true,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": false
642
+ },
643
+ "50332": {
644
+ "content": "[unused47]",
645
+ "lstrip": false,
646
+ "normalized": true,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": false
650
+ },
651
+ "50333": {
652
+ "content": "[unused48]",
653
+ "lstrip": false,
654
+ "normalized": true,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": false
658
+ },
659
+ "50334": {
660
+ "content": "[unused49]",
661
+ "lstrip": false,
662
+ "normalized": true,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": false
666
+ },
667
+ "50335": {
668
+ "content": "[unused50]",
669
+ "lstrip": false,
670
+ "normalized": true,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": false
674
+ },
675
+ "50336": {
676
+ "content": "[unused51]",
677
+ "lstrip": false,
678
+ "normalized": true,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": false
682
+ },
683
+ "50337": {
684
+ "content": "[unused52]",
685
+ "lstrip": false,
686
+ "normalized": true,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": false
690
+ },
691
+ "50338": {
692
+ "content": "[unused53]",
693
+ "lstrip": false,
694
+ "normalized": true,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": false
698
+ },
699
+ "50339": {
700
+ "content": "[unused54]",
701
+ "lstrip": false,
702
+ "normalized": true,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": false
706
+ },
707
+ "50340": {
708
+ "content": "[unused55]",
709
+ "lstrip": false,
710
+ "normalized": true,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": false
714
+ },
715
+ "50341": {
716
+ "content": "[unused56]",
717
+ "lstrip": false,
718
+ "normalized": true,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": false
722
+ },
723
+ "50342": {
724
+ "content": "[unused57]",
725
+ "lstrip": false,
726
+ "normalized": true,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": false
730
+ },
731
+ "50343": {
732
+ "content": "[unused58]",
733
+ "lstrip": false,
734
+ "normalized": true,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": false
738
+ },
739
+ "50344": {
740
+ "content": "[unused59]",
741
+ "lstrip": false,
742
+ "normalized": true,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": false
746
+ },
747
+ "50345": {
748
+ "content": "[unused60]",
749
+ "lstrip": false,
750
+ "normalized": true,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": false
754
+ },
755
+ "50346": {
756
+ "content": "[unused61]",
757
+ "lstrip": false,
758
+ "normalized": true,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": false
762
+ },
763
+ "50347": {
764
+ "content": "[unused62]",
765
+ "lstrip": false,
766
+ "normalized": true,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": false
770
+ },
771
+ "50348": {
772
+ "content": "[unused63]",
773
+ "lstrip": false,
774
+ "normalized": true,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": false
778
+ },
779
+ "50349": {
780
+ "content": "[unused64]",
781
+ "lstrip": false,
782
+ "normalized": true,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": false
786
+ },
787
+ "50350": {
788
+ "content": "[unused65]",
789
+ "lstrip": false,
790
+ "normalized": true,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": false
794
+ },
795
+ "50351": {
796
+ "content": "[unused66]",
797
+ "lstrip": false,
798
+ "normalized": true,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": false
802
+ },
803
+ "50352": {
804
+ "content": "[unused67]",
805
+ "lstrip": false,
806
+ "normalized": true,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": false
810
+ },
811
+ "50353": {
812
+ "content": "[unused68]",
813
+ "lstrip": false,
814
+ "normalized": true,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": false
818
+ },
819
+ "50354": {
820
+ "content": "[unused69]",
821
+ "lstrip": false,
822
+ "normalized": true,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": false
826
+ },
827
+ "50355": {
828
+ "content": "[unused70]",
829
+ "lstrip": false,
830
+ "normalized": true,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": false
834
+ },
835
+ "50356": {
836
+ "content": "[unused71]",
837
+ "lstrip": false,
838
+ "normalized": true,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": false
842
+ },
843
+ "50357": {
844
+ "content": "[unused72]",
845
+ "lstrip": false,
846
+ "normalized": true,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": false
850
+ },
851
+ "50358": {
852
+ "content": "[unused73]",
853
+ "lstrip": false,
854
+ "normalized": true,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": false
858
+ },
859
+ "50359": {
860
+ "content": "[unused74]",
861
+ "lstrip": false,
862
+ "normalized": true,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": false
866
+ },
867
+ "50360": {
868
+ "content": "[unused75]",
869
+ "lstrip": false,
870
+ "normalized": true,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": false
874
+ },
875
+ "50361": {
876
+ "content": "[unused76]",
877
+ "lstrip": false,
878
+ "normalized": true,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": false
882
+ },
883
+ "50362": {
884
+ "content": "[unused77]",
885
+ "lstrip": false,
886
+ "normalized": true,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": false
890
+ },
891
+ "50363": {
892
+ "content": "[unused78]",
893
+ "lstrip": false,
894
+ "normalized": true,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": false
898
+ },
899
+ "50364": {
900
+ "content": "[unused79]",
901
+ "lstrip": false,
902
+ "normalized": true,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": false
906
+ },
907
+ "50365": {
908
+ "content": "[unused80]",
909
+ "lstrip": false,
910
+ "normalized": true,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": false
914
+ },
915
+ "50366": {
916
+ "content": "[unused81]",
917
+ "lstrip": false,
918
+ "normalized": true,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": false
922
+ },
923
+ "50367": {
924
+ "content": "[unused82]",
925
+ "lstrip": false,
926
+ "normalized": true,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": false
930
+ }
931
+ },
932
+ "clean_up_tokenization_spaces": true,
933
+ "cls_token": "[CLS]",
934
+ "extra_special_tokens": {},
935
+ "mask_token": "[MASK]",
936
+ "model_input_names": [
937
+ "input_ids",
938
+ "attention_mask"
939
+ ],
940
+ "model_max_length": 8192,
941
+ "pad_token": "[PAD]",
942
+ "sep_token": "[SEP]",
943
+ "tokenizer_class": "PreTrainedTokenizerFast",
944
+ "unk_token": "[UNK]"
945
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81528ef86348f13753ac9c95b4717d84c6154cea233a98a91f71c7af256567a1
3
+ size 5368
training_params.json ADDED
@@ -0,0 +1,30 @@
1
+ {
2
+ "data_path": "uisikdag/42news",
3
+ "model": "answerdotai/ModernBERT-large",
4
+ "lr": 5e-05,
5
+ "epochs": 10,
6
+ "max_seq_length": 128,
7
+ "batch_size": 8,
8
+ "warmup_ratio": 0.1,
9
+ "gradient_accumulation": 1,
10
+ "optimizer": "adamw_torch",
11
+ "scheduler": "linear",
12
+ "weight_decay": 0.0,
13
+ "max_grad_norm": 1.0,
14
+ "seed": 42,
15
+ "train_split": "train",
16
+ "valid_split": "test",
17
+ "text_column": "text",
18
+ "target_column": "label",
19
+ "logging_steps": -1,
20
+ "project_name": "autotrain-modernBERT-large",
21
+ "auto_find_batch_size": false,
22
+ "mixed_precision": "fp16",
23
+ "save_total_limit": 1,
24
+ "push_to_hub": true,
25
+ "eval_strategy": "epoch",
26
+ "username": "uisikdag",
27
+ "log": "tensorboard",
28
+ "early_stopping_patience": 5,
29
+ "early_stopping_threshold": 0.01
30
+ }
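training_params.json records the AutoTrain configuration behind the run: ModernBERT-large fine-tuned on uisikdag/42news for up to 10 epochs at batch size 8, lr 5e-5 with a linear scheduler and 10% warmup, fp16 mixed precision, and epoch-level evaluation with early stopping (patience 5, threshold 0.01). A small sketch, under the stated warmup-rounding assumption, showing how these parameters reproduce the learning_rate values logged in the trainer state above:

```python
# Linear-warmup schedule implied by training_params.json: lr=5e-5,
# scheduler="linear", warmup_ratio=0.1, with max_steps=41990 reported
# in the trainer state (10 epochs). The exact warmup-step rounding
# (4199 here) is an assumption; it mirrors transformers' linear scheduler.
BASE_LR = 5e-5
TOTAL_STEPS = 41990
WARMUP_STEPS = int(0.1 * TOTAL_STEPS)  # 4199

def linear_warmup_lr(step: int) -> float:
    if step < WARMUP_STEPS:
        return BASE_LR * step / max(1, WARMUP_STEPS)
    return BASE_LR * max(0, TOTAL_STEPS - step) / (TOTAL_STEPS - WARMUP_STEPS)

# These land close to the logged learning_rate values above,
# e.g. ~4.76e-05 around step 6025 and ~4.45e-05 around step 8375.
for step in (6025, 7000, 8375):
    print(step, f"{linear_warmup_lr(step):.6e}")
```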