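# SLERP merge configuration (mergekit-style YAML): blends
# hfl/llama-3-chinese-8b-instruct with meta-llama/Meta-Llama-3-8B-Instruct
# (the base model), interpolating at t=0.5 in bfloat16 and building a
# union tokenizer from both models.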
merge_method: slerp
models:
- model: hfl/llama-3-chinese-8b-instruct
  parameters:
    weight: 0.5
- model: meta-llama/Meta-Llama-3-8B-Instruct
  parameters:
    weight: 0.5
parameters:
  t: 0.5
dtype: bfloat16
tokenizer:
  source: union
base_model: meta-llama/Meta-Llama-3-8B-Instruct
write_readme: README.md
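# Usage sketch, assuming mergekit is installed and this file is saved as
# config.yml; the output directory name below is illustrative:
#   mergekit-yaml config.yml ./merged-llama-3-8b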