# mergekit configuration: DARE-linear merge of a Chinese-tuned Llama-3 8B
# instruct model with the original Meta-Llama-3-8B-Instruct.
# NOTE(review): trailing " | |" artifacts (table-extraction residue) and lost
# indentation were repaired; key structure reconstructed per mergekit schema.
merge_method: dare_linear

models:
  - model: hfl/llama-3-chinese-8b-instruct
    parameters:
      weight: 0.5    # equal contribution from each model
      density: 0.4   # fraction of delta weights kept by DARE drop-and-rescale
  - model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 0.5
      density: 0.4

parameters:
  int8_mask: true    # store merge masks as int8 to reduce memory use

dtype: bfloat16

tokenizer:
  source: union      # merged tokenizer is the union of both models' vocabularies

base_model: meta-llama/Meta-Llama-3-8B-Instruct
write_readme: README.md