dtype: float32
out_dtype: bfloat16
merge_method: model_stock
base_model: mistralai/Mistral-Nemo-Instruct-2407
models:
  - model: Aleteian/DarkCamelot
  - model: HumanLLMs/Human-Like-Mistral-Nemo-Instruct-2407
    parameters:
      weight: [0.5, 2]
  - model: LatitudeGames/Wayfarer-12B
    parameters:
      weight: [0.5, 1.5, 0]
  - model: mergekit-community/MN-Hekate-Kleidoukhos-12B
  - model: mergekit-community/MN-Hekate-Panopaia-12B
    parameters:
      weight: 2
  - model: mergekit-community/MN-Nyx-Chthonia-12B
  - model: mistralai/Mistral-Nemo-Base-2407
    parameters:
      weight: [3, 1]
  - model: nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B
    parameters:
      weight: [0, 1, 1.5]
  - model: nbeerbower/mistral-nemo-wissenschaft-12B+jtatman/mistral_nemo_12b_reasoning_psychology_lora
    parameters:
      weight: [2, 1, 0]
tokenizer:
  source: union
  tokens:
    "[INST]":
      source: mistralai/Mistral-Nemo-Instruct-2407
      force: true
    "[/INST]":
      source: mistralai/Mistral-Nemo-Instruct-2407
      force: true
    "<|im_start|>":
      source: mergekit-community/MN-Nyx-Chthonia-12B
      force: true
    "<|im_end|>":
      source: mergekit-community/MN-Nyx-Chthonia-12B
      force: true
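
As a minimal sketch, a configuration like the one above can be run with mergekit's Python scripting interface (the `mergekit-yaml` CLI is the usual alternative). The file name `merge-config.yml`, the output path, and the option values below are placeholders, not part of the original configuration.

```python
# Sketch: execute the merge config above with mergekit (assumes `pip install mergekit`).
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "merge-config.yml"  # placeholder path holding the YAML shown above
OUTPUT_PATH = "./merged-model"   # placeholder directory for the merged weights

# Parse the YAML into a validated merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the model_stock merge; copy_tokenizer builds the union tokenizer
# with the forced [INST]/[/INST] and <|im_start|>/<|im_end|> tokens.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```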