# Llama-3.1-SuperNova-Lite-14B / mergekit_config.yml
# Passthrough self-merge (depth upscaling): seven 8-layer slices of the
# 32-layer base model are stacked, each slice overlapping the previous one
# by 4 layers, producing a 56-layer model from a single source checkpoint.
slices:
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [0, 8]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [4, 12]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [8, 16]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [12, 20]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [16, 24]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [20, 28]
  - sources:
      - model: "/Users/jsarnecki/opt/Workspace/arcee-ai/Llama-3.1-SuperNova-Lite"
        layer_range: [24, 32]
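# Concatenate the selected layer ranges verbatim (no weight averaging);
# perform the merge in float32 and write the output tensors in bfloat16.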
merge_method: passthrough
dtype: float32
out_dtype: bfloat16
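# A minimal usage sketch, assuming mergekit is installed and this file is
# saved as mergekit_config.yml; the output directory name is illustrative:
#
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./Llama-3.1-SuperNova-Lite-14B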