DolphinLake-7B / mergekit_config.yml
merge_method: dare_ties
parameters:
  int8_mask: true
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5 # fallback for rest of tensors
  embed_slerp: true
models:
  - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
    # No parameters necessary for base model
  - model: senseable/WestLake-7B-v2
    parameters:
      density: 0.58
      weight: 0.8
base_model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
tokenizer_source: model:senseable/WestLake-7B-v2
dtype: bfloat16
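
To reproduce this merge, the config above can be passed to mergekit either via its mergekit-yaml CLI or via its Python entry points. The snippet below is a minimal sketch based on the usage pattern shown in the mergekit README; it assumes the MergeConfiguration, run_merge, and MergeOptions API, and the output path and option values are illustrative placeholders, not part of this repository.

# Minimal sketch of running this merge with mergekit's Python API
# (roughly equivalent to: mergekit-yaml mergekit_config.yml ./DolphinLake-7B).
# Paths and option values are placeholders for illustration.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"  # the config shown above
OUTPUT_PATH = "./DolphinLake-7B"    # directory to write the merged model to

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_ties merge described by the config.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU for tensor math if present
        copy_tokenizer=True,             # write a tokenizer alongside the merged weights
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)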