Upload folder using huggingface_hub
- .gitattributes +2 -0
- checkpoint-131000/config.json +79 -0
- checkpoint-131000/generation_config.json +6 -0
- checkpoint-131000/model.safetensors +3 -0
- checkpoint-131000/optimizer.pt +3 -0
- checkpoint-131000/rng_state_0.pth +3 -0
- checkpoint-131000/rng_state_1.pth +3 -0
- checkpoint-131000/rng_state_2.pth +3 -0
- checkpoint-131000/rng_state_3.pth +3 -0
- checkpoint-131000/rng_state_4.pth +3 -0
- checkpoint-131000/rng_state_5.pth +3 -0
- checkpoint-131000/rng_state_6.pth +3 -0
- checkpoint-131000/rng_state_7.pth +3 -0
- checkpoint-131000/scheduler.pt +3 -0
- checkpoint-131000/trainer_state.json +3 -0
- checkpoint-131000/training_args.bin +3 -0
- checkpoint-132000/config.json +79 -0
- checkpoint-132000/generation_config.json +6 -0
- checkpoint-132000/model.safetensors +3 -0
- checkpoint-132000/optimizer.pt +3 -0
- checkpoint-132000/rng_state_0.pth +3 -0
- checkpoint-132000/rng_state_1.pth +3 -0
- checkpoint-132000/rng_state_2.pth +3 -0
- checkpoint-132000/rng_state_3.pth +3 -0
- checkpoint-132000/rng_state_4.pth +3 -0
- checkpoint-132000/rng_state_5.pth +3 -0
- checkpoint-132000/rng_state_6.pth +3 -0
- checkpoint-132000/rng_state_7.pth +3 -0
- checkpoint-132000/scheduler.pt +3 -0
- checkpoint-132000/trainer_state.json +3 -0
- checkpoint-132000/training_args.bin +3 -0
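
A commit with this message is typically produced by the `upload_folder` API of `huggingface_hub`, which uploads a local directory in a single commit. A minimal sketch follows; the repo id and local folder path are placeholders, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()
# Upload every file under a local training-output directory in one commit.
# "user/diff-vlm-checkpoints" and "./output" are placeholder values.
api.upload_folder(
    folder_path="./output",
    repo_id="user/diff-vlm-checkpoints",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```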
.gitattributes
CHANGED
@@ -52,3 +52,5 @@ checkpoint-120000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-128000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-129000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
 checkpoint-130000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-131000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-132000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
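
The two added lines route the new checkpoints' `trainer_state.json` files through Git LFS, mirroring the rules already present for earlier checkpoints. A minimal sketch of appending such rules to a local `.gitattributes` (paths assumed to be relative to the repository root):

```python
from pathlib import Path

# LFS rules for the two new checkpoints, matching the existing pattern.
rules = [
    "checkpoint-131000/trainer_state.json filter=lfs diff=lfs merge=lfs -text",
    "checkpoint-132000/trainer_state.json filter=lfs diff=lfs merge=lfs -text",
]

gitattributes = Path(".gitattributes")
existing = gitattributes.read_text() if gitattributes.exists() else ""
with gitattributes.open("a") as f:
    for rule in rules:
        if rule not in existing:  # avoid duplicating a rule that is already tracked
            f.write(rule + "\n")
```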
checkpoint-131000/config.json
ADDED
@@ -0,0 +1,79 @@
+{
+  "ar_steps": 1,
+  "architectures": [
+    "DiffVLMDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "condition_layer": -1,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "image_token_id": 151655,
+  "img_cross_attention_dim": 2048,
+  "img_diffuser_depth": 2,
+  "img_ffn_dim_multiplier": null,
+  "img_hidden_size": 1536,
+  "img_multiple_of": 256,
+  "img_norm_eps": 1e-05,
+  "img_num_attention_heads": 12,
+  "img_num_kv_heads": 12,
+  "img_qk_norm": true,
+  "in_channels": 32,
+  "initializer_range": 0.02,
+  "inject_img_diffuser": false,
+  "input_size": 32,
+  "intermediate_size": 8960,
+  "layer_group_size": 7,
+  "layerwise_start_idx": 0,
+  "lora_alpha": 128,
+  "lora_bias": "none",
+  "lora_dropout": 0.05,
+  "lora_enable": false,
+  "lora_r": 64,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2_vl",
+  "non_linearity": 1,
+  "norm_elementwise_affine": true,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "patch_size": 1,
+  "repa_coeff": 0.1,
+  "repa_layers": "2",
+  "repa_shared": false,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sample_size": 128,
+  "sampling_steps": 28,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.0",
+  "use_cache": true,
+  "use_repa": false,
+  "use_residual_attn": true,
+  "use_sliding_window": false,
+  "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1536,
+    "in_chans": 3,
+    "model_type": "qwen2_vl",
+    "spatial_patch_size": 14
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 151936
+}
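
The config declares a custom `DiffVLMDiffusion` architecture on top of a `qwen2_vl` base, so the checkpoint will not load with stock Transformers model classes alone. A sketch of inspecting the config only, assuming a placeholder repo id; fields not defined by Qwen2-VL (e.g. `sampling_steps`, `vae_path`) are generally kept as extra attributes on the loaded config object.

```python
from transformers import AutoConfig

# Placeholder repo id; the actual repository name is not part of this diff.
repo_id = "user/diff-vlm-checkpoints"

# model_type is "qwen2_vl", so AutoConfig resolves to the Qwen2-VL config class.
config = AutoConfig.from_pretrained(repo_id, subfolder="checkpoint-131000")
print(config.architectures)   # ["DiffVLMDiffusion"]
print(config.sampling_steps)  # 28
print(config.vae_path)        # "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers"
```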
checkpoint-131000/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "transformers_version": "4.47.0"
+}
checkpoint-131000/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:272970ca72d02cf05128567e15cf424e0f87bb30147eb51a20871930040aa1ea
+size 4411759432
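
Files like `model.safetensors` are stored as Git LFS pointers; the three lines above are the pointer, not the weights themselves. A minimal sketch of fetching the actual ~4.4 GB file from the Hub (the repo id is again a placeholder):

```python
from huggingface_hub import hf_hub_download

# Resolves the LFS pointer and downloads the real weights to the local cache.
path = hf_hub_download(
    repo_id="user/diff-vlm-checkpoints",  # placeholder repo id
    filename="checkpoint-131000/model.safetensors",
)
print(path)  # local path of the downloaded 4,411,759,432-byte file
```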
checkpoint-131000/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c7b25ef8cb2276619b79f584505a761f210282804a2fcaf71acef2b42567aa4
+size 6332050591
checkpoint-131000/rng_state_0.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84cf9a840fc24d68783b840c0ea955e1d802e11972fccf87157e235422498c12
+size 16389
checkpoint-131000/rng_state_1.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9c03eec53e8258ebf4b4d31f98b94f0e8601bf99096a4c616e85d931c40ca3e
+size 16389
checkpoint-131000/rng_state_2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8fab175228bcda009a32c57d871e48768bdfc1c82dc7adb4f76ecae2810e261f
+size 16389
checkpoint-131000/rng_state_3.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8d92e057b762f76893b8c41de19b6cc24aa4a774cae31e7037b72db6cf7b82e
+size 16389
checkpoint-131000/rng_state_4.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62e30a0c500e500df480247d46482c07b8988f4895d80e31c987861f1306e722
+size 16389
checkpoint-131000/rng_state_5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62b8df5f46ffb4e0197dbc504af63cf3e170c1f0e3c8f1159c9927707b87a2d0
+size 16389
checkpoint-131000/rng_state_6.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3e9ea4c084a5f26f10f8956985aaec98506fde7f65b49255d50ee94d343c09c
+size 16389
checkpoint-131000/rng_state_7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b74632035ef41bb4bf8f97678d2da1d53e5d9f312b68a7bd0abe01bede3ea2e
+size 16389
checkpoint-131000/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:717902f819098ee4bd0a6eb70a6b2c7dd5bc2f52ce9a2c6325a65a5c6d0dd734
+size 1465
checkpoint-131000/trainer_state.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e0ae8bc94566561c940d3936b80493fa799f825ec2f347c2a8040488a17435b
+size 20106010
checkpoint-131000/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e7a7243197dd963f39bce225f40e21f97143793760b132f227c444418f9e7e4
+size 6481
checkpoint-132000/config.json
ADDED
@@ -0,0 +1,79 @@
+{
+  "ar_steps": 1,
+  "architectures": [
+    "DiffVLMDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "condition_layer": -1,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "image_token_id": 151655,
+  "img_cross_attention_dim": 2048,
+  "img_diffuser_depth": 2,
+  "img_ffn_dim_multiplier": null,
+  "img_hidden_size": 1536,
+  "img_multiple_of": 256,
+  "img_norm_eps": 1e-05,
+  "img_num_attention_heads": 12,
+  "img_num_kv_heads": 12,
+  "img_qk_norm": true,
+  "in_channels": 32,
+  "initializer_range": 0.02,
+  "inject_img_diffuser": false,
+  "input_size": 32,
+  "intermediate_size": 8960,
+  "layer_group_size": 7,
+  "layerwise_start_idx": 0,
+  "lora_alpha": 128,
+  "lora_bias": "none",
+  "lora_dropout": 0.05,
+  "lora_enable": false,
+  "lora_r": 64,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2_vl",
+  "non_linearity": 1,
+  "norm_elementwise_affine": true,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "patch_size": 1,
+  "repa_coeff": 0.1,
+  "repa_layers": "2",
+  "repa_shared": false,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sample_size": 128,
+  "sampling_steps": 28,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.0",
+  "use_cache": true,
+  "use_repa": false,
+  "use_residual_attn": true,
+  "use_sliding_window": false,
+  "vae_path": "mit-han-lab/dc-ae-f32c32-in-1.0-diffusers",
+  "video_token_id": 151656,
+  "vision_config": {
+    "hidden_size": 1536,
+    "in_chans": 3,
+    "model_type": "qwen2_vl",
+    "spatial_patch_size": 14
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 151936
+}
checkpoint-132000/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "transformers_version": "4.47.0"
+}
checkpoint-132000/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c19d3488f87200598f7e793ed12ecced6252d490a108807c9ab7668b0eda7fd
+size 4411759432
checkpoint-132000/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3c09abe462d93f50b8384baff400c867642f9c9fe213bdee2253b0a339b58e8
+size 6332050591
checkpoint-132000/rng_state_0.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48d181b14c34e270b5aff8c839c4f4e13a1ecfdee3a731aabb74d38ce75879d4
+size 16389
checkpoint-132000/rng_state_1.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b3a564f4180dac38ce3a6de0600d9e124b2bc4b3235fb2e1d54b1c6fac339d4
+size 16389
checkpoint-132000/rng_state_2.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c584ead737f1d5c752b0633a11b5459e906beb8c2018bbd5ca6dcc338a3f8565
+size 16389
checkpoint-132000/rng_state_3.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:528d6f0aa8d1f6b7129327e22a17adbfe1009064be3d712b799f2f658f9e2e44
+size 16389
checkpoint-132000/rng_state_4.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59394be60693fdeaf3241194318cd8b3a233651beed3ca39bedd61ee36d7e6e4
+size 16389
checkpoint-132000/rng_state_5.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:353ffacc1fc3459c8c24cfc363783d06086d20aadc7f128c2c488490862ac64f
+size 16389
checkpoint-132000/rng_state_6.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28f0f25a8846c263b5531e1a3c8d9145f042fd8af3781d90b23a444f9b5afe16
+size 16389
checkpoint-132000/rng_state_7.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc1203adef13dacdfdfda52ec0e2d7776f584405950da0e6773007d72fa60de6
+size 16389
checkpoint-132000/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c20509369fa849c7f9a0f4dc7c2c0148b87a8eb59856974050bdd32d8b2cf244
+size 1465
checkpoint-132000/trainer_state.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27aaf8f5a404d1ea4313e70a989399926b91c7e3c5e313f1da199c726d19ceab
+size 20260663
checkpoint-132000/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7b9897afd7104fe65e590877a80c1f7e73fcb61e07388fb015fe42397f1ddb3
+size 6417