Add files using upload-large-folder tool
- Qwen3-30B-A3B-Q4_K_M.gguf +2 -2
- Qwen3-30B-A3B-UD-Q2_K_XL.gguf +2 -2
- Qwen3-30B-A3B-UD-Q3_K_XL.gguf +2 -2
- Qwen3-30B-A3B-UD-Q4_K_XL.gguf +2 -2
- Qwen3-30B-A3B-UD-Q5_K_XL.gguf +2 -2
- config.json +1 -1
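
The commit was produced with the `upload_large_folder` helper from `huggingface_hub`, which uploads large folders in resumable chunks and commits the files through Git LFS. A minimal sketch of such a call, assuming a repo id and local folder path that are not stated in this diff:

```python
# Sketch only: repo_id and folder_path are assumptions, not taken from this commit.
from huggingface_hub import upload_large_folder

upload_large_folder(
    repo_id="unsloth/Qwen3-30B-A3B-GGUF",  # assumed target repository
    folder_path="./Qwen3-30B-A3B-GGUF",    # assumed local folder holding the .gguf files
    repo_type="model",
)
```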
Qwen3-30B-A3B-Q4_K_M.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9f1a24700a339b09c06009b729b5c809e0b64c213b8af5b711b3dbdfd0c5ba48
+size 18556686912
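
Because the GGUF weights are tracked through Git LFS, the repository itself only stores a small pointer file with the object's sha256 and byte size, as in the hunk above. A hedged sketch of verifying a downloaded copy against those pointer values (the local file path is an assumption):

```python
# Verify a downloaded GGUF file against the sha256 and size recorded in its
# Git LFS pointer; the expected values are taken from the diff above.
import hashlib
import os

EXPECTED_SHA256 = "9f1a24700a339b09c06009b729b5c809e0b64c213b8af5b711b3dbdfd0c5ba48"
EXPECTED_SIZE = 18556686912
PATH = "Qwen3-30B-A3B-Q4_K_M.gguf"  # assumed local path

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"

h = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("LFS pointer matches the downloaded file")
```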
Qwen3-30B-A3B-UD-Q2_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a8e68b6db5c20612c29178f3027330007e7c45aca6a77d8a03a493ac6aaa9d03
+size 11814277696
Qwen3-30B-A3B-UD-Q3_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f1b8111d3e49a0238fcaa77ec9ee24eb9805266b5e85bf8e54c12dedb02cf4ff
+size 13833048640
Qwen3-30B-A3B-UD-Q4_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:263346cdf8c9824cc332d2b00a84100a5be231ac600e7a875c6a2b47c9802f57
+size 17715663424
Qwen3-30B-A3B-UD-Q5_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f284af35140194f073985a093f6d257cb7060784ecbfeb52c15f9545dfa4f434
+size 21740302912
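
Any one of the quantized files above can be fetched individually rather than cloning the whole repository. A short sketch using `hf_hub_download`, again assuming the repo id, which is not stated in this diff:

```python
# Download a single quantized GGUF file from the commit above.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="unsloth/Qwen3-30B-A3B-GGUF",      # assumed repository id
    filename="Qwen3-30B-A3B-UD-Q4_K_XL.gguf",  # one of the files in this commit
)
print(local_path)
```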
config.json CHANGED
@@ -31,7 +31,7 @@
 "sliding_window": null,
 "tie_word_embeddings": false,
 "torch_dtype": "bfloat16",
-"transformers_version": "4.
+"transformers_version": "4.51.3",
 "unsloth_fixed": true,
 "use_cache": true,
 "use_sliding_window": false,
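
The `transformers_version` field records the library release used when the config was exported (4.51.3 after this commit). A hedged sketch of comparing it against the locally installed `transformers` release; the config path is an assumption:

```python
# Compare the transformers_version recorded in config.json with the installed release.
import json

import transformers
from packaging import version

with open("config.json") as f:
    cfg = json.load(f)

required = version.parse(cfg["transformers_version"])  # "4.51.3" in this commit
installed = version.parse(transformers.__version__)
if installed < required:
    print(f"installed transformers {installed} is older than {required} used to export this model")
```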