# phixtral-4x2_8odd / mergekit_moe_config.yml
base_model: cognitivecomputations/dolphin-2_6-phi-2  # supplies the shared (non-expert) weights
gate_mode: cheap_embed  # seed router gates from raw token embeddings of the positive prompts
experts:
  - source_model: cognitivecomputations/dolphin-2_6-phi-2
    positive_prompts: [""]  # prompts used to compute this expert's gate vector (left empty here)
  - source_model: lxuechen/phi-2-dpo
    positive_prompts: [""]