Llama-3-SnowyRP-8B-V1-B / mergekit_config.yml
base_model: kuotient/Meta-Llama-3-8B-Instruct  # TIES task vectors are computed relative to this base
dtype: float16
merge_method: ties  # TIES: trim low-magnitude deltas, elect a sign per parameter, then merge
parameters:
  int8_mask: 1.0  # store intermediate sign masks as int8 to save memory
  normalize: 1.0  # rescale contributing weights so they sum to 1
slices:
- sources:
  - layer_range: [0, 32]  # all 32 transformer layers of each model
    model: Masterjp123/Llama-3-SnowyRP-8B-V1
    parameters:
      density: [1.0, 0.7, 0.1]  # gradient: fraction of deltas kept, interpolated from first to last layer
      weight: 1.0
  - layer_range: [0, 32]
    model: cgato/L3-TheSpice-8b-v0.1.3
    parameters:
      density: 0.5  # keep half of this model's deltas
      weight: [0.0, 0.3, 0.7, 1.0]  # gradient: contribution grows with layer depth
  - layer_range: [0, 32]
    model: Sao10K/L3-Solana-8B-v1
    parameters:
      density: 0.33
      weight:
      - filter: mlp  # weight 0.5 applies to MLP tensors only...
        value: 0.5
      - value: 0.0   # ...and zero everywhere else
  - layer_range: [0, 32]
    model: kuotient/Meta-Llama-3-8B-Instruct  # the base model itself; needs no merge parameters
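
To reproduce the merge, this file can be fed to mergekit. Below is a minimal sketch using mergekit's documented Python API, assuming mergekit is installed (pip install mergekit); the output directory name is illustrative, not part of this repo.

# Minimal sketch: run this merge via mergekit's Python API.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"         # this file
OUTPUT_PATH = "./Llama-3-SnowyRP-8B-V1-B"  # hypothetical output directory

# Parse the YAML into mergekit's pydantic config model.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the TIES merge and write the merged model to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is present
        copy_tokenizer=True,             # copy the base tokenizer into the output
    ),
)

The same merge can also be run from the shell with mergekit's CLI: mergekit-yaml mergekit_config.yml ./output-dir --cuda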