# Qwark-4B / mergekit_config.yml
slices:
  - sources:
      - layer_range: [0, 6]
        model: Qwen/Qwen2.5-3B
  - sources:
      - layer_range: [3, 12]
        model: Qwen/Qwen2.5-3B
  - sources:
      - layer_range: [9, 18]
        model: Qwen/Qwen2.5-3B
  - sources:
      - layer_range: [14, 24]
        model: Qwen/Qwen2.5-3B
  - sources:
      - layer_range: [20, 30]
        model: Qwen/Qwen2.5-3B
  - sources:
      - layer_range: [26, 36]
        model: Qwen/Qwen2.5-3B
merge_method: passthrough
dtype: bfloat16
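
# Usage sketch (not part of the original config): this passthrough merge stacks
# overlapping layer slices of Qwen/Qwen2.5-3B (0-6, 3-12, 9-18, 14-24, 20-30,
# 26-36). With mergekit's end-exclusive ranges, that yields a deeper, roughly
# 4B-parameter model built from 54 layers of the 36-layer base. Assuming
# mergekit is installed, a config like this is typically run via its CLI; the
# output path below is illustrative:
#
#   mergekit-yaml mergekit_config.yml ./Qwark-4B --copy-tokenizer
#
# Adding --cuda is an option when a GPU is available.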