---
# mergekit configuration: TIES merge of two Mistral-7B fine-tunes.
# All three sources span the full layer stack (layers 0-32).
base_model: mistralai/Mistral-7B-v0.1
dtype: float16
merge_method: ties
modules:
  default:
    slices:
    - sources:
      # Base model source carries no weight/density parameters; under
      # TIES the base is the reference that task vectors are taken
      # against rather than a weighted contributor.
      - layer_range: [0, 32]
        model: mistralai/Mistral-7B-v0.1
      - layer_range: [0, 32]
        model: OpenPipe/mistral-ft-optimized-1218
        parameters:
          density: 0.5  # fraction of each task vector's params retained
          weight: 0.5   # relative contribution in the merged sum
      - layer_range: [0, 32]
        model: mlabonne/NeuralHermes-2.5-Mistral-7B
        parameters:
          density: 0.5
          weight: 0.3   # lighter contribution than the 1218 fine-tune
parameters:
  # NOTE(review): normalize is a boolean flag in mergekit; 1.0 is kept
  # byte-identical here (it coerces truthy) — consider writing `true`.
  normalize: 1.0