# Hugging Face page metadata (scrape residue, preserved as comments):
# aaron-di's picture
# Upload folder using huggingface_hub
# e8052ae verified
# raw
# history blame contribute delete
# 564 Bytes
---
# mergekit configuration: SLERP merge of two Mistral-7B-family models.
# Each slice interpolates layers [0, 32) of both source models; `t` controls
# the interpolation weight per parameter group (0.0 = base_model, 1.0 = other).
slices:
  - sources:
      - model: liminerity/M7-7b
        layer_range: [0, 32]
      - model: AurelPx/Percival_01-7b-slerp
        layer_range: [0, 32]
merge_method: slerp
base_model: liminerity/M7-7b
parameters:
  t:
    # Per-layer interpolation schedule for self-attention weights.
    - filter: self_attn
      value: [0.3971503740486436, 0.6649148917405439, 0.9727902590850609, 0.24826875048537567, 0.9603913534901606]
    # Per-layer interpolation schedule for MLP weights
    # (complementary to self_attn: each entry is 1 - the self_attn entry).
    - filter: mlp
      value: [0.6028496259513564, 0.33508510825945614, 0.027209740914939107, 0.7517312495146243, 0.03960864650983942]
    # Fallback t for all remaining tensors (embeddings, norms, lm_head).
    - value: 0.7872855893513945
dtype: bfloat16
random_seed: 0