Add files using upload-large-folder tool
- .gitattributes +4 -0
- Qwen3-30B-A3B-UD-IQ1_S.gguf +3 -0
- Qwen3-30B-A3B-UD-IQ2_M.gguf +3 -0
- Qwen3-30B-A3B-UD-Q2_K_XL.gguf +3 -0
- Qwen3-30B-A3B-UD-Q4_K_XL.gguf +3 -0
- config.json +39 -0
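
To pull a single quant rather than clone the whole repository, huggingface_hub can download one file at a time. A minimal sketch in Python; the repo id unsloth/Qwen3-30B-A3B-GGUF is an assumption, since the commit itself does not name the repository:

# Download one of the quants listed above.
# NOTE: repo_id is an assumption -- the commit does not name the repository.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="unsloth/Qwen3-30B-A3B-GGUF",      # assumed repo id
    filename="Qwen3-30B-A3B-UD-Q2_K_XL.gguf",  # from the file list above
)
print(path)
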
.gitattributes
CHANGED
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+Qwen3-30B-A3B-UD-IQ1_S.gguf filter=lfs diff=lfs merge=lfs -text
+Qwen3-30B-A3B-UD-IQ2_M.gguf filter=lfs diff=lfs merge=lfs -text
+Qwen3-30B-A3B-UD-Q2_K_XL.gguf filter=lfs diff=lfs merge=lfs -text
+Qwen3-30B-A3B-UD-Q4_K_XL.gguf filter=lfs diff=lfs merge=lfs -text
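
Each added line tells Git LFS to store matching files as lightweight pointers and keep the multi-gigabyte payloads in LFS storage. A quick sanity check of which paths the new patterns catch; fnmatch only approximates gitattributes matching, but it is exact for the literal filenames added here:

# Check which files the tracked patterns route through LFS.
from fnmatch import fnmatch

patterns = [
    "*.zip", "*.zst", "*tfevents*",
    "Qwen3-30B-A3B-UD-IQ1_S.gguf",
    "Qwen3-30B-A3B-UD-IQ2_M.gguf",
    "Qwen3-30B-A3B-UD-Q2_K_XL.gguf",
    "Qwen3-30B-A3B-UD-Q4_K_XL.gguf",
]

for name in ["Qwen3-30B-A3B-UD-IQ1_S.gguf", "config.json"]:
    lfs = any(fnmatch(name, p) for p in patterns)
    print(f"{name}: {'LFS' if lfs else 'regular git'}")
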
Qwen3-30B-A3B-UD-IQ1_S.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e00680c1397c1e47f87f4c7c8ce74cb96967f24d4b9138909ebf5e53daa79fa
+size 9021280032
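
The three lines above are the entire in-repo content of this roughly 9 GB file: a Git LFS pointer recording the spec version, a sha256 oid, and the payload size in bytes. A small sketch that parses this pointer format:

# Parse a Git LFS pointer (the exact three-line format shown above).
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {
        "version": fields["version"],
        "oid_algo": algo,  # "sha256"
        "oid": digest,
        "size": int(fields["size"]),
    }

pointer = """\
version https://git-lfs.github.com/spec/v1
oid sha256:2e00680c1397c1e47f87f4c7c8ce74cb96967f24d4b9138909ebf5e53daa79fa
size 9021280032
"""
info = parse_lfs_pointer(pointer)
print(info["oid"], info["size"])  # digest and ~9.0 GB for the IQ1_S quant
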
Qwen3-30B-A3B-UD-IQ2_M.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:05fbcb913f3f39100a22991e164af72307b350ec9940e82103771fbc2917970d
+size 10865577760
Qwen3-30B-A3B-UD-Q2_K_XL.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12d122cad286c90008da1734ed02959d5a846c6f5f5cbc0f1f3f6786c82ba574
+size 11814276896
Qwen3-30B-A3B-UD-Q4_K_XL.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f06c359e1d6ee8cc7acc9d45b7e68e94f31dad9bca19422d96747ae11442469
+size 17715662624
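
Because the repository stores only the oid and size for each .gguf, a downloaded payload can be checked against its pointer. A verification sketch using the Q4_K_XL values above; the local file path is an assumption:

# Verify a downloaded GGUF against its LFS pointer (Q4_K_XL values above).
import hashlib

EXPECTED_SHA256 = "1f06c359e1d6ee8cc7acc9d45b7e68e94f31dad9bca19422d96747ae11442469"
EXPECTED_SIZE = 17715662624

def verify(path: str) -> bool:
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return size == EXPECTED_SIZE and h.hexdigest() == EXPECTED_SHA256

print(verify("Qwen3-30B-A3B-UD-Q4_K_XL.gguf"))  # assumed local path
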
config.json
ADDED
@@ -0,0 +1,39 @@
+{
+  "architectures": [
+    "Qwen3MoeForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "decoder_sparse_step": 1,
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 40960,
+  "max_window_layers": 48,
+  "mlp_only_layers": [],
+  "model_type": "qwen3_moe",
+  "moe_intermediate_size": 768,
+  "norm_topk_prob": true,
+  "num_attention_heads": 32,
+  "num_experts": 128,
+  "num_experts_per_tok": 8,
+  "num_hidden_layers": 48,
+  "num_key_value_heads": 4,
+  "output_router_logits": false,
+  "pad_token_id": 151654,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "router_aux_loss_coef": 0.001,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.52.0.dev0",
+  "unsloth_fixed": true,
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
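
The config describes a Qwen3 MoE decoder: 48 layers, 128 experts with 8 routed per token, and grouped-query attention (32 query heads sharing 4 KV heads). A short sketch pulling those facts out of the file, assuming it is saved locally as config.json:

# Derive a few architecture facts from the config above.
import json

with open("config.json") as f:
    cfg = json.load(f)

# 8 of 128 experts are active per token.
print(f"experts: {cfg['num_experts_per_tok']} of {cfg['num_experts']} active per token")

# GQA: 32 query heads share 4 KV heads -> groups of 8.
print(f"GQA group size: {cfg['num_attention_heads'] // cfg['num_key_value_heads']}")

# Context window as shipped (rope_scaling is null here).
print(f"max positions: {cfg['max_position_embeddings']}")
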