File size: 451 Bytes
9df7ddd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
---
# mergekit recipe: DARE-TIES merge of four 7B fine-tunes onto AlphaMonarch-7B.
merge_method: dare_ties
base_model: mlabonne/AlphaMonarch-7B
parameters:
  # Rescale the summed task vectors so the per-model weights act as proportions.
  normalize: true
models:
  - model: jeiku/Luna_7B
    parameters:
      weight: 0.75
  - model: jeiku/Cookie_7B
    parameters:
      weight: 1.0
  - model: jeiku/NarrativeNexus_7B
    parameters:
      weight: 0.5
  - model: CultriX/NeuralTrix-bf16
    parameters:
      weight: 0.75
  - model: mlabonne/AlphaMonarch-7B
    parameters:
      weight: 1.0
dtype: float16