Updated README.md: moved the existing base_model entries to the merged_models tag
README.md CHANGED
@@ -1,10 +1,14 @@
 ---
+base_model: Alsebay/NaruMOE-3x7B-v2
+inference: false
+library_name: transformers
 license: cc-by-nc-4.0
-base_model:
+merged_models:
 - Alsebay/NarumashiRTS-V2
 - SanjiWatsuki/Kunoichi-DPO-v2-7B
 - Nitral-AI/KukulStanta-7B
-library_name: transformers
+pipeline_tag: text-generation
+quantized_by: Suparious
 tags:
 - moe
 - merge
@@ -15,9 +19,6 @@ tags:
 - text-generation
 - autotrain_compatible
 - endpoints_compatible
-pipeline_tag: text-generation
-inference: false
-quantized_by: Suparious
 ---
 # Alsebay/NaruMOE-3x7B-v2 AWQ
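For reference, a sketch of the README front matter as it reads after this change, reassembled from the added and unchanged lines in the diff above. The tag entries that sit between `- merge` and `- text-generation` fall outside the diff hunks and are left elided here.

```yaml
---
base_model: Alsebay/NaruMOE-3x7B-v2
inference: false
library_name: transformers
license: cc-by-nc-4.0
merged_models:
- Alsebay/NarumashiRTS-V2
- SanjiWatsuki/Kunoichi-DPO-v2-7B
- Nitral-AI/KukulStanta-7B
pipeline_tag: text-generation
quantized_by: Suparious
tags:
- moe
- merge
# ... (tag entries not shown in the diff)
- text-generation
- autotrain_compatible
- endpoints_compatible
---
```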