```yaml
base_model: AdityaXPV/Mistral-7B-law-sage-v0.3
dtype: float16
merge_method: ties
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
  - sources:
      - layer_range: [0, 32]
        model: shibiyaj/lawGPT-chat
        parameters:
          density: 0.5
          weight: 0.5
      - layer_range: [0, 32]
        model: AdityaXPV/Mistral-7B-law-sage-v0.3
```
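This config performs a TIES merge of `shibiyaj/lawGPT-chat` into the base model `AdityaXPV/Mistral-7B-law-sage-v0.3` across all 32 transformer layers. Once merged and published, the result loads like any other Mistral-7B-class model. The sketch below is a minimal usage example using the standard `transformers` API; the repo id `your-username/merged-law-model` is a hypothetical placeholder, not a name defined by this config.

```python
# Minimal usage sketch for a checkpoint produced by this merge config.
# NOTE: "your-username/merged-law-model" is a hypothetical placeholder
# repo id; substitute wherever the merged model is actually published.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-username/merged-law-model"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # matches the dtype declared in the merge config
    device_map="auto",
)

prompt = "Explain the doctrine of consideration in contract law."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=200)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```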