# Queen-2.5-14B-aka / mergekit_config.yml
# --- HuggingFace page metadata (preserved as comments; not part of the config) ---
# AIgotahole's picture
# Upload folder using huggingface_hub
# ca04316 verified
# raw · history · blame · contribute · delete
# 617 Bytes
# mergekit recipe: "breadcrumbs" merge of three Qwen2.5-14B finetunes.
# Indentation restored — in the pasted original every line sat at column 0,
# which orphaned the per-model parameter blocks and produced duplicate
# top-level `parameters:` keys (invalid YAML / wrong mergekit schema).
models:
  # Base model (also named in base_model below); contributes the backbone,
  # so it carries no per-model density/weight schedule here.
  - model: Sao10K/14B-Qwen2.5-Kunou-v1
  - model: sometimesanotion/Qwenvergence-14B-v13-Prose-DS
    parameters:
      # Per-layer schedules: mergekit interpolates list values across the
      # model's layer range, so list length need not equal the layer count.
      # NOTE(review): density has 9 anchor points while weight has 8 — both
      # interpolate independently, but confirm the asymmetry is intentional.
      density: [0.16, 0.26, 0.36, 0.46, 0.56, 0.46, 0.36, 0.26, 0.16]
      weight: [0.166, 0.496, 0.496, 0.166, 0.166, 0.496, 0.496, 0.166]
  - model: deepcogito/cogito-v1-preview-qwen-14B
    parameters:
      # Mirrored schedules: this model is emphasized where the one above is
      # de-emphasized (density/weight curves are reversed/complementary).
      density: [0.56, 0.46, 0.36, 0.26, 0.16, 0.26, 0.36, 0.46, 0.56]
      weight: [0.496, 0.166, 0.166, 0.496, 0.496, 0.166, 0.166, 0.496]
merge_method: breadcrumbs
base_model: Sao10K/14B-Qwen2.5-Kunou-v1
parameters:
  # breadcrumbs hyperparameters: gamma prunes the largest-magnitude fraction
  # of task-vector deltas, lambda scales the merged deltas added to the base.
  gamma: 0.06
  lambda: 0.96
# Take the tokenizer from base_model rather than building a union vocabulary.
tokenizer_source: base
dtype: bfloat16