jackcai1206 committed
Commit 5a2443e · verified · 1 Parent(s): 96ec2b4
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +10 -0
  2. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/config.json +38 -0
  3. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/generation_config.json +7 -0
  4. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/merges.txt +0 -0
  5. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/model.safetensors +3 -0
  6. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/optimizer.pt +3 -0
  7. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/scheduler.pt +3 -0
  8. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/special_tokens_map.json +34 -0
  9. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/tokenizer.json +0 -0
  10. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/tokenizer_config.json +155 -0
  11. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/training_state.json +1 -0
  12. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/vocab.json +0 -0
  13. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/config.json +38 -0
  14. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/generation_config.json +7 -0
  15. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/merges.txt +0 -0
  16. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/model.safetensors +3 -0
  17. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/optimizer.pt +3 -0
  18. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/scheduler.pt +3 -0
  19. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/special_tokens_map.json +34 -0
  20. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/tokenizer.json +0 -0
  21. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/tokenizer_config.json +155 -0
  22. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/training_state.json +1 -0
  23. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/vocab.json +0 -0
  24. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/config.json +38 -0
  25. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/generation_config.json +7 -0
  26. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/merges.txt +0 -0
  27. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/model.safetensors +3 -0
  28. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/optimizer.pt +3 -0
  29. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/scheduler.pt +3 -0
  30. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/special_tokens_map.json +34 -0
  31. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/tokenizer.json +0 -0
  32. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/tokenizer_config.json +155 -0
  33. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/training_state.json +1 -0
  34. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/vocab.json +0 -0
  35. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/config.json +38 -0
  36. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/generation_config.json +7 -0
  37. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/merges.txt +0 -0
  38. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/model.safetensors +3 -0
  39. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/optimizer.pt +3 -0
  40. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/scheduler.pt +3 -0
  41. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/special_tokens_map.json +34 -0
  42. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/tokenizer.json +0 -0
  43. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/tokenizer_config.json +155 -0
  44. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/training_state.json +1 -0
  45. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/vocab.json +0 -0
  46. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/config.json +38 -0
  47. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/generation_config.json +7 -0
  48. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/merges.txt +0 -0
  49. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/model.safetensors +3 -0
  50. checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/optimizer.pt +3 -0
.gitattributes CHANGED
@@ -39,3 +39,13 @@ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1/global_step_184/to
  checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1/global_step_46/tokenizer.json filter=lfs diff=lfs merge=lfs -text
  checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1/global_step_92/tokenizer.json filter=lfs diff=lfs merge=lfs -text
  checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_8/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1-self_consistency_5/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_2-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_2-self_consistency_5/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_3-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_4-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_5-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_6-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_7-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_8-no_label/global_step_187/tokenizer.json filter=lfs diff=lfs merge=lfs -text
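These attribute lines route the matched tokenizer.json files through Git LFS, so the Git repository itself stores only small pointer files while the large blobs live in LFS storage. A minimal Python sketch of what `git lfs track <pattern>` records for one such path (the path is taken from this commit; the helper name is illustrative):

```python
from pathlib import Path

# Minimal sketch of what `git lfs track <pattern>` records: one
# attribute line per tracked path, appended to .gitattributes.
def track_with_lfs(repo_root: str, pattern: str) -> None:
    attributes = Path(repo_root) / ".gitattributes"
    line = f"{pattern} filter=lfs diff=lfs merge=lfs -text\n"
    existing = attributes.read_text() if attributes.exists() else ""
    if line not in existing:  # avoid duplicate entries
        with attributes.open("a") as f:
            f.write(line)

track_with_lfs(
    ".",
    "checkpoints/igsm-sft-Qwen/Qwen2.5-0.5B-Instruct-liger-round_1-no_label/global_step_187/tokenizer.json",
)
```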
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 576,
+   "initializer_range": 0.041666666666666664,
+   "intermediate_size": 1536,
+   "is_llama_config": true,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 9,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 3,
+   "pad_token_id": 2,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_interleaved": false,
+   "rope_scaling": null,
+   "rope_theta": 100000,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float32",
+   "transformers.js_config": {
+     "kv_cache_dtype": {
+       "fp16": "float16",
+       "q4f16": "float16"
+     }
+   },
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
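This config describes a 135M-parameter Llama-style decoder: 30 layers of width 576, 9 query heads sharing 3 key/value heads (grouped-query attention), tied input/output embeddings, and a 49152-token vocabulary. A hedged sketch of loading one of these checkpoints with `transformers` (assumes the repo has been downloaded so the path exists locally):

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Path from this commit; assumes the repo was downloaded locally first.
ckpt = "checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187"

config = AutoConfig.from_pretrained(ckpt)
# 9 query heads share 3 key/value heads -> grouped-query attention.
print(config.num_attention_heads, config.num_key_value_heads)  # 9 3

model = AutoModelForCausalLM.from_pretrained(ckpt)  # reads model.safetensors
print(sum(p.numel() for p in model.parameters()))   # ~135M parameters
```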
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 2,
+   "transformers_version": "4.51.3"
+ }
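generation_config.json pins only the special-token ids; all sampling parameters fall back to library defaults. For example:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(
    "checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187"
)
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 1 2 2
```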
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3461a07155736f000ec71bc121b4f6d5ff56366f1f412d6fa475188a6174e0c
+ size 651336704
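Only this three-line pointer is committed to Git; the ~651 MB weight blob lives in LFS storage, addressed by the sha256 oid. A small sketch (file paths are illustrative) that parses a pointer and verifies a downloaded blob against it:

```python
import hashlib
import os

def read_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the oid and size in its pointer."""
    ptr = read_lfs_pointer(pointer_path)
    expected = ptr["oid"].split(":", 1)[1]  # strip the "sha256:" prefix
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return (os.path.getsize(blob_path) == int(ptr["size"])
            and h.hexdigest() == expected)
```

Before `git lfs pull`, model.safetensors on disk is the pointer itself; after, it is the real blob.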
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff7c2321d45d50296b93d77d551f4567eb267d87bc168cada8ca75bb89a1e0a6
+ size 1076166657
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d184b56afe5fc616a4fd2f5562ac74acbb8066bd536f59eafa311c1dc0afc58
+ size 1000
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": {
+     "content": "<|im_start|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
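The map repurposes the ChatML markers as control tokens: <|im_start|> is bos, and <|im_end|> serves as both eos and pad. On a loaded tokenizer these surface as attributes, e.g.:

```python
from transformers import AutoTokenizer

ckpt = "checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187"
tok = AutoTokenizer.from_pretrained(ckpt)

print(tok.bos_token, tok.eos_token, tok.pad_token)  # <|im_start|> <|im_end|> <|im_end|>
print(tok.convert_tokens_to_ids("<|im_end|>"))      # 2, matching eos/pad ids in config.json
```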
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<repo_name>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "<reponame>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "5": {
+       "content": "<file_sep>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "6": {
+       "content": "<filename>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "7": {
+       "content": "<gh_stars>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "8": {
+       "content": "<issue_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "9": {
+       "content": "<issue_comment>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "10": {
+       "content": "<issue_closed>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "11": {
+       "content": "<jupyter_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "12": {
+       "content": "<jupyter_text>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "13": {
+       "content": "<jupyter_code>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "14": {
+       "content": "<jupyter_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "15": {
+       "content": "<jupyter_script>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "16": {
+       "content": "<empty_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": "<|im_start|>",
+   "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "extra_special_tokens": {},
+   "model_max_length": 8192,
+   "pad_token": "<|im_end|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>",
+   "vocab_size": 49152
+ }
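The chat_template is ChatML with a SmolLM system prompt injected whenever the conversation does not start with a system message. A sketch of rendering it via `apply_chat_template` (checkpoint path as above):

```python
from transformers import AutoTokenizer

ckpt = "checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187"
tok = AutoTokenizer.from_pretrained(ckpt)

messages = [{"role": "user", "content": "What is 3 + 4?"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>
# <|im_start|>user
# What is 3 + 4?<|im_end|>
# <|im_start|>assistant
```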
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/training_state.json ADDED
@@ -0,0 +1 @@
+ {"step": 187, "epoch": 0}
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-no_label/global_step_187/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 576,
+   "initializer_range": 0.041666666666666664,
+   "intermediate_size": 1536,
+   "is_llama_config": true,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 9,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 3,
+   "pad_token_id": 2,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_interleaved": false,
+   "rope_scaling": null,
+   "rope_theta": 100000,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float32",
+   "transformers.js_config": {
+     "kv_cache_dtype": {
+       "fp16": "float16",
+       "q4f16": "float16"
+     }
+   },
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 2,
+   "transformers_version": "4.51.3"
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3461a07155736f000ec71bc121b4f6d5ff56366f1f412d6fa475188a6174e0c
+ size 651336704
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff7c2321d45d50296b93d77d551f4567eb267d87bc168cada8ca75bb89a1e0a6
+ size 1076166657
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d184b56afe5fc616a4fd2f5562ac74acbb8066bd536f59eafa311c1dc0afc58
+ size 1000
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": {
+     "content": "<|im_start|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<repo_name>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "<reponame>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "5": {
+       "content": "<file_sep>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "6": {
+       "content": "<filename>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "7": {
+       "content": "<gh_stars>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "8": {
+       "content": "<issue_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "9": {
+       "content": "<issue_comment>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "10": {
+       "content": "<issue_closed>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "11": {
+       "content": "<jupyter_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "12": {
+       "content": "<jupyter_text>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "13": {
+       "content": "<jupyter_code>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "14": {
+       "content": "<jupyter_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "15": {
+       "content": "<jupyter_script>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "16": {
+       "content": "<empty_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": "<|im_start|>",
+   "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "extra_special_tokens": {},
+   "model_max_length": 8192,
+   "pad_token": "<|im_end|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>",
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/training_state.json ADDED
@@ -0,0 +1 @@
+ {"step": 187, "epoch": 0}
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_1-self_consistency_5/global_step_187/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 576,
+   "initializer_range": 0.041666666666666664,
+   "intermediate_size": 1536,
+   "is_llama_config": true,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 9,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 3,
+   "pad_token_id": 2,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_interleaved": false,
+   "rope_scaling": null,
+   "rope_theta": 100000,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float32",
+   "transformers.js_config": {
+     "kv_cache_dtype": {
+       "fp16": "float16",
+       "q4f16": "float16"
+     }
+   },
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 2,
+   "transformers_version": "4.51.3"
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0297be9cf1822587077ce2c13ba5c375052e1107782cf461921ba87a7c82363c
+ size 651336704
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b563e541c277ffbd0e7232748b3448689fbec6b04a5573ccdecf06b4375625c
+ size 1076166785
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d184b56afe5fc616a4fd2f5562ac74acbb8066bd536f59eafa311c1dc0afc58
+ size 1000
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": {
+     "content": "<|im_start|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<repo_name>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "<reponame>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "5": {
+       "content": "<file_sep>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "6": {
+       "content": "<filename>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "7": {
+       "content": "<gh_stars>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "8": {
+       "content": "<issue_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "9": {
+       "content": "<issue_comment>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "10": {
+       "content": "<issue_closed>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "11": {
+       "content": "<jupyter_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "12": {
+       "content": "<jupyter_text>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "13": {
+       "content": "<jupyter_code>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "14": {
+       "content": "<jupyter_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "15": {
+       "content": "<jupyter_script>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "16": {
+       "content": "<empty_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": "<|im_start|>",
+   "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "extra_special_tokens": {},
+   "model_max_length": 8192,
+   "pad_token": "<|im_end|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>",
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/training_state.json ADDED
@@ -0,0 +1 @@
+ {"step": 187, "epoch": 0}
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_2-no_label/global_step_187/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 576,
+   "initializer_range": 0.041666666666666664,
+   "intermediate_size": 1536,
+   "is_llama_config": true,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 9,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 3,
+   "pad_token_id": 2,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_interleaved": false,
+   "rope_scaling": null,
+   "rope_theta": 100000,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float32",
+   "transformers.js_config": {
+     "kv_cache_dtype": {
+       "fp16": "float16",
+       "q4f16": "float16"
+     }
+   },
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 2,
+   "transformers_version": "4.51.3"
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5da4018d1538419d7d7c214a34d930459c7e841385934d18ec7d0a7fd1de1aa7
+ size 651336704
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86168b194bafddfb175a019c903209c4a1e5d0366ac169c9a5391fc8e1adcdd2
+ size 1076166849
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d184b56afe5fc616a4fd2f5562ac74acbb8066bd536f59eafa311c1dc0afc58
+ size 1000
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": {
+     "content": "<|im_start|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<repo_name>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "<reponame>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "5": {
+       "content": "<file_sep>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "6": {
+       "content": "<filename>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "7": {
+       "content": "<gh_stars>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "8": {
+       "content": "<issue_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "9": {
+       "content": "<issue_comment>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "10": {
+       "content": "<issue_closed>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "11": {
+       "content": "<jupyter_start>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "12": {
+       "content": "<jupyter_text>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "13": {
+       "content": "<jupyter_code>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "14": {
+       "content": "<jupyter_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "15": {
+       "content": "<jupyter_script>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "16": {
+       "content": "<empty_output>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "bos_token": "<|im_start|>",
+   "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "extra_special_tokens": {},
+   "model_max_length": 8192,
+   "pad_token": "<|im_end|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>",
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/training_state.json ADDED
@@ -0,0 +1 @@
+ {"step": 187, "epoch": 0}
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_3-no_label/global_step_187/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 576,
+   "initializer_range": 0.041666666666666664,
+   "intermediate_size": 1536,
+   "is_llama_config": true,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 9,
+   "num_hidden_layers": 30,
+   "num_key_value_heads": 3,
+   "pad_token_id": 2,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_interleaved": false,
+   "rope_scaling": null,
+   "rope_theta": 100000,
+   "tie_word_embeddings": true,
+   "torch_dtype": "float32",
+   "transformers.js_config": {
+     "kv_cache_dtype": {
+       "fp16": "float16",
+       "q4f16": "float16"
+     }
+   },
+   "transformers_version": "4.51.3",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 2,
+   "transformers_version": "4.51.3"
+ }
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:372ac9f4dabbaaf02a584bb5fc532c4601050cd46d904d27b9f6cc742ca60348
+ size 651336704
checkpoints/igsm-sft-HuggingFaceTB/SmolLM2-135M-Instruct-liger-round_4-no_label/global_step_187/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdfc4a94cd27686cb6a7aa3b5951b9de9aeb5dcf6ca5db7f88f133a0f8b31444
+ size 1076166849