Text Generation
scaling
GregorZiegltrumAA committed
Commit c8e3f2c · 1 Parent(s): 079dc54
config.yml ADDED
@@ -0,0 +1,79 @@
+ optimizer:
+   allreduce_bucket_size: 500000000
+   beta1: 0.9
+   beta2: 0.95
+   debug_log: false
+   eps: 1e-08
+   gradient_clipping: 0.0
+   zero: true
+   zero_save_static: false
+ topology:
+   activation_checkpointing_type: disabled
+   global_batch_size: 1024
+   gradient_accumulation_steps: 2
+   micro_batch_size: 2
+   model_parallel_size: 1
+   pipe_parallel_size: 1
+   pipe_partition_method: balanced
+   pipe_partition_overwrite: null
+   sequence_parallel: false
+ trainer:
+   seed: 42
+   train_iterations: 72000
+ training:
+   allow_missing_params_in_optimizer: true
+   training_groups:
+   - group_name: param_group
+     independent_weight_decay: false
+     learning_rate_scheduler:
+       learning_rate: 0.0004
+       learning_rate_decay_iters: 72000
+       learning_rate_decay_style: cosine
+       learning_rate_minimum: 4e-05
+       learning_rate_warmup_steps: 500
+     parameters_exclude: []
+     weight_decay: 0.1
+ transformer_architecture:
+   attention_bias: false
+   attention_num_kv_heads: null
+   attention_qkv_in_one: true
+   dropout_after_attention: 0.0
+   dropout_after_mlp: 0.0
+   dropout_attention_probs: 0.0
+   dropout_embedding: 0.0
+   dropout_image_encoder: 0.0
+   hidden_size: 3072
+   image_encoder: false
+   key_query_norm: false
+   layernorm:
+     layernorm_epsilon: 1e-05
+     optimization_type: torch
+   local_attention_window_size: null
+   masked_softmax:
+     kernel: flash_attention
+     scale: 1.0
+     softmax_in_fp32: false
+   mlp_bias: false
+   mlp_factor: 2.6666666666666665
+   mlp_type: swiglu
+   norm_type: rms
+   num_attention_heads: 24
+   num_layers: 24
+   num_local_attention_heads: 0
+   precision: bfloat16
+   relative_position_embedding_type: rotary_complex
+   reset_attention_mask: false
+   reset_position_ids: false
+   rotary_embedding_base: 10000
+   rotary_percentage: 1.0
+   sequence_length: 4096
+   umup:
+     act_mult: 1.0
+     attn_mult: 1.0
+     enable: false
+     loss_mult: 1.0
+     residual_attn_ratio: 1.0
+     residual_mult: 1.0
+   vocab_file: null
+   vocab_size: 65536
+   weight_tying: false
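A few quantities implied by this config can be checked by hand. Below is a minimal sketch (not part of the repository; all values are copied from config.yml above) assuming the usual relation global_batch_size = micro_batch_size × gradient_accumulation_steps × data-parallel size, and a bias-free pre-norm block with fused QKV and a SwiGLU MLP as the config describes:

```python
# Sanity checks derived from config.yml (illustrative, not part of the repo).
hidden_size = 3072
num_layers = 24
vocab_size = 65536
mlp_factor = 2.6666666666666665

# Data-parallel world size implied by the batch settings:
# 1024 = 2 (micro) * 2 (grad accum) * data_parallel_size
data_parallel_size = 1024 // (2 * 2)  # -> 256 ranks

# Per-layer parameters: q/k/v/o projections plus a SwiGLU MLP whose
# intermediate size is hidden_size * mlp_factor (= 8192 here).
attn_params = 4 * hidden_size * hidden_size           # ~37.7M
ffn_hidden = round(hidden_size * mlp_factor)          # 8192
mlp_params = 3 * hidden_size * ffn_hidden             # gate, up, down: ~75.5M
layer_params = attn_params + mlp_params               # ~113.2M

# Untied embedding and LM head (weight_tying: false), ~201.3M each.
embed_params = vocab_size * hidden_size
total = num_layers * layer_params + 2 * embed_params  # ~3.12B

print(f"{data_parallel_size=} {layer_params=:,} {total=:,}")
```

The ~3.12B total and the ~113.2M-per-layer figure line up with the bfloat16 (2 bytes per parameter) shard sizes of the checkpoint files added below; the small per-shard remainder is norm weights and serialization overhead.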
model_state_layer_0_EmbeddingInput.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f1d371303e283c50a3ebef395014f30f2b80a7a2b536934bbd0a5da3ee1654e
+ size 402654667
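Each of these .pt entries is a Git LFS pointer file rather than the weights themselves: three text lines giving the spec version, the blob's SHA-256, and its size in bytes. As an illustration (the helper below is hypothetical, and the download path is a placeholder), a pulled blob can be verified against its pointer like this:

```python
# Hypothetical helper: parse a Git LFS v1 pointer file and verify that a
# separately downloaded blob matches its recorded hash and size.
import hashlib
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    # Pointer files are small "key value" text lines:
    # version <url>, oid sha256:<hex>, size <bytes>.
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines())
    return {
        "oid": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }

def verify_blob(blob_path: str, pointer: dict) -> bool:
    data = Path(blob_path).read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["oid"]

ptr = parse_lfs_pointer("model_state_layer_0_EmbeddingInput.pt")  # the pointer, not the weights
print(verify_blob("downloads/model_state_layer_0_EmbeddingInput.pt", ptr))  # placeholder path
```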
model_state_layer_10_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:279600a631b30a698c88e1d1377bdda48df618adf31d0b54e2ab6c5caaa36139
+ size 226507949
model_state_layer_11_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:496fc8bebc7cf59bf63634d1e0ce4afc67e6752bb2cff0d33a765aaa0d0d7454
+ size 226507949
model_state_layer_12_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83fd5a6c39a89812cb44e7377c49783e0358369605bc3d8531a2df52a868303b
+ size 226507949
model_state_layer_13_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:94bea416b166360c016f37776f5936f995e3c51d563efa4fa36aa9dc3417b891
+ size 226507949
model_state_layer_14_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85aca802e4bd4bc58629e680c3bae2303f42a71c59e94164671751deae985441
+ size 226507949
model_state_layer_15_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c9aafccc6a66eb2654d38c557fa1b667d091f4d10ffa396da1a604865285ee1
+ size 226507949
model_state_layer_16_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b08dec40782e34bb7b2fd9768cc40bec33bfd269626082a129b0e31cb63fe4a
+ size 226507949
model_state_layer_17_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8facf0de83c522b5e1a2927f31c218e02b3cd66902bac16962fa6638925e92e3
+ size 226507949
model_state_layer_18_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d3538fa2621c5a3cde89e916e466fa1b53487eff4f52abeda5cd2c767b7e8c4
+ size 226507949
model_state_layer_19_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f398d62b47fd4c1dd8c41fd9cad8717a10bd846281432746bfa0a5a42d33069
+ size 226507949
model_state_layer_1_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22ba50060e215bc0a76f9adcfef5100a83dd2bde50603f3cf210a25c138bf044
+ size 226507938
model_state_layer_20_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3994a46ef243df7051db9219c9d558c07b030aee0016ba9ae7d0fdc95112537c
+ size 226507949
model_state_layer_21_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6da50e1ccab60877a6f1863db06f52103c568a2b97de144698f81795c2a73df
+ size 226507949
model_state_layer_22_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d0975696b3f64194d518c9e059f5a865359faf9c3e22f58872c4eaaf1bc06a9
+ size 226507949
model_state_layer_23_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8990412801596e25c042ddd460b7368d0c07d3d4ad0dccf3cebc2ae102fceb95
+ size 226507949
model_state_layer_24_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a32d6f70aea4bbef72a585c12228a9924e865725273ac87529e787b32bd18a5
+ size 226507949
model_state_layer_25_LayerNormWrapper.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c017a392c847d6c0c04de327790d9f8b0817f54394af5228d91a16a0e4a37d0
+ size 7602
model_state_layer_26_TransformerLMHead.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c137813d0918d1e7ab247991e0050b7e6d3a5ead4b500f0f1ea4bca9ca8fa04
+ size 402654632
model_state_layer_2_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc0f57189f04339e1b0669bc0ce74c992fd2f7a913ed8be4a680e84bac69e88a
+ size 226507938
model_state_layer_3_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcbc38d220bb1f92aa326a856d8a8aed2cb2af71d2e20c24a7e80a14a7cbeee6
+ size 226507938
model_state_layer_4_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c261eff4096cf1f37b2e19dc74ba02422be2a9e9f60e37d77ad50fb9fbacf991
+ size 226507938
model_state_layer_5_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:824db9a4c41d7d319c24ec335ed82f89f1de9feee5329a015d441bbbaf52a294
+ size 226507938
model_state_layer_6_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e27801071f273642ae8632afea68c5c153f39e6cca99f0f03cb8aa6b131d44aa
+ size 226507938
model_state_layer_7_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0967597332f8332ad8ac2d660606d115a9c982f23e36e26fce229522c4136f10
+ size 226507938
model_state_layer_8_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c582e8756ab37083b93cb0c4196c5b400bb34bc6ec30b58ed7d9714d1b792a70
+ size 226507938
model_state_layer_9_TransformerLayer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b67e874da782d309fb3b2215f5cf6ae694e533b09fd187537beb4c2b2bba02a
+ size 226507938
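Taken together, the shards split the checkpoint by pipeline stage: layer 0 is the input embedding, layers 1-24 are the transformer blocks, layer 25 is the final norm, and layer 26 is the untied LM head (the embedding and head shards are roughly twice the size of a transformer layer, consistent with the 65536 × 3072 bfloat16 embedding matrix). A rough exploratory sketch for inspecting them, assuming the LFS blobs have been pulled and that each file holds a plain state dict of tensors; the exact key names depend on the training framework, so this only inspects rather than reassembling the model:

```python
# Exploratory sketch: load each per-layer shard on CPU and summarize it.
import re
from pathlib import Path
import torch

shards = sorted(
    Path(".").glob("model_state_layer_*.pt"),
    key=lambda p: int(re.search(r"layer_(\d+)_", p.name).group(1)),
)
for shard in shards:
    state = torch.load(shard, map_location="cpu")  # assumed to be a dict of tensors
    n_params = sum(t.numel() for t in state.values() if torch.is_tensor(t))
    print(f"{shard.name}: {len(state)} entries, {n_params:,} params")
```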
vocab.json ADDED
The diff for this file is too large to render.