# L3.1-Boshima-b / mergekit_config.yml
# Uploaded by chargoddard via huggingface_hub — commit 44ebc6d (verified), 602 bytes
---
# mergekit configuration: SLERP merge of two Llama-3.1-8B checkpoints.
# NOTE(review): per mergekit's slerp docs, t=0 keeps base_model weights and
# t=1 takes the other model's; a list is interpolated across the layer
# stack — confirm against the mergekit version in use.
models:
  - model: ArliAI/Llama-3.1-8B-ArliAI-Formax-v1.0
  - model: mergekit-community/L3-Boshima-a

merge_method: slerp
base_model: ArliAI/Llama-3.1-8B-ArliAI-Formax-v1.0

parameters:
  t:
    # Attention/MLP projection tensors share one layer-wise gradient:
    # keep the base model at the ends of the stack, blend toward
    # L3-Boshima-a in the middle layers.
    - filter: v_proj
      value: [0, 0, 1, 1, 1, 1, 1, 1, 0.9, 0, 0]
    - filter: o_proj
      value: [0, 0, 1, 1, 1, 1, 1, 1, 0.9, 0, 0]
    - filter: up_proj
      value: [0, 0, 1, 1, 1, 1, 1, 1, 0.9, 0, 0]
    - filter: gate_proj
      value: [0, 0, 1, 1, 1, 1, 1, 1, 0.9, 0, 0]
    - filter: down_proj
      value: [0, 0, 1, 1, 1, 1, 1, 1, 0.9, 0, 0]
    # Default interpolation factor for every tensor not matched above.
    - value: 0.88

dtype: bfloat16