base_model: Qwen/Qwen3-30B-A3B-Base
models:
  - model: allura-forge/q3-30b-ft-ep2-merged
    parameters:
      select_topk: 0.75
  - model: Gryphe/Pantheon-Proto-RP-1.8-30B-A3B
    parameters:
      select_topk: 0.4
  - model: Qwen/Qwen3-30B-A3B
    parameters:
      select_topk: 0.25
merge_method: sce
dtype: bfloat16
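
For reference, a minimal sketch of how a config like this is typically run through mergekit's Python API (MergeConfiguration, MergeOptions, run_merge), based on mergekit's documented usage. The filename, output path, and option values below are illustrative assumptions, not part of this repository.

# Sketch: load the YAML above and run the SCE merge with mergekit.
# Paths and option values are assumptions for illustration only.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # assumed filename for the config above
OUTPUT_PATH = "./merged-model"       # assumed output directory

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is available
        copy_tokenizer=True,             # copy the base model's tokenizer to the output
        lazy_unpickle=True,              # lower peak memory while loading shards
    ),
)

Equivalently, the mergekit-yaml command-line entry point accepts the same config file plus an output directory.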