# Llama3-15B-lingyang-v0.1 / mergekit_config.yml
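# Passthrough ("frankenmerge") configuration: it stacks layer slices from
# hfl/llama-3-chinese-8b-instruct-v2 and NousResearch/Hermes-2-Theta-Llama-3-8B
# in an interleaved order. The slices contribute 10 + 20 + 10 + 12 + 12 = 64
# decoder layers, twice the 32-layer depth of Llama-3-8B, which yields a model
# of roughly 15B parameters (consistent with the repository name).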
slices:
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [0, 10]
  - sources:
      - model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
        layer_range: [0, 20]
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [10, 20]
  - sources:
      - model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
        layer_range: [20, 32]
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [20, 32]
merge_method: passthrough
base_model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
dtype: bfloat16
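
For reference, a merge like this can be reproduced with mergekit's Python entry point. The sketch below follows the run_merge usage documented in mergekit's README; the config path and output directory are placeholders, not values from this repository.

    import yaml
    import torch

    from mergekit.config import MergeConfiguration
    from mergekit.merge import MergeOptions, run_merge

    # Parse the YAML config above into mergekit's validated config object.
    with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
        merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

    # Build the 64-layer passthrough merge and write it to the output folder
    # (directory name assumed here).
    run_merge(
        merge_config,
        out_path="./Llama3-15B-lingyang-v0.1",
        options=MergeOptions(
            cuda=torch.cuda.is_available(),  # use a GPU if one is available
            copy_tokenizer=True,             # copy the base model's tokenizer
            lazy_unpickle=False,             # experimental low-memory loader
            low_cpu_memory=False,
        ),
    )

The same merge can also be run from the command line with mergekit's CLI, e.g. mergekit-yaml mergekit_config.yml ./output-directory --cuda.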