---
# mergekit configuration: DARE-TIES merge of Mistral-Small-24B finetunes
# onto the unsloth/Mistral-Small-24B-Instruct-2501 base.
# Per-model `weight` scales each model's delta; per-model `density` is the
# fraction of delta parameters retained before rescaling (DARE).
models:
  - model: NousResearch/DeepHermes-3-Mistral-24B-Preview
    parameters:
      weight: 1.0
      density: 0.85
  - model: TheDrummer/Cydonia-24B-v2.1
    parameters:
      weight: 0.24
      density: 0.69
  - model: TheDrummer/Cydonia-24B-v2
    parameters:
      weight: 0.14
      density: 0.67
  - model: ReadyArt/Pawdistic-Fur-Mittens-V0.1-24B
    parameters:
      weight: 0.16
      density: 0.67
  - model: Gryphe/Pantheon-RP-1.8-24b-Small-3.1
    parameters:
      weight: 0.26
      density: 0.75
  - model: PocketDoc/Dans-PersonalityEngine-V1.2.0-24b
    parameters:
      weight: 0.27
      density: 0.71
  - model: Trappu/Picaro-Apparatus-ties0.7-24b
    parameters:
      weight: 0.27
      density: 0.7
  - model: BeaverAI/MS-2501-DPE-QwQify-v0.1-24B
    parameters:
      weight: 0.22
      density: 0.58
  - model: ToastyPigeon/new-ms-rp-test-v0-v2
    parameters:
      weight: 0.22
      density: 0.71
  - model: v2ray/GPT4chan-24B
    parameters:
      weight: 0.24
      density: 0.72
  - model: allura-org/Mistral-Small-24b-Sertraline-0304
    parameters:
      weight: 0.21
      density: 0.68
  - model: huihui-ai/Mistral-Small-24B-Instruct-2501-abliterated
    parameters:
      weight: 0.21
      density: 0.68
  - model: cognitivecomputations/Dolphin3.0-Mistral-24B
    parameters:
      weight: 0.28
      density: 0.67
  - model: Darkknight535/WinterEngine-24B-Instruct
    parameters:
      weight: 0.21
      density: 0.72
  # NOTE(review): duplicate of the Sertraline-0304 entry above, with different
  # weight/density — likely a copy-paste error. Kept to preserve the original
  # merge result; confirm whether one entry should be removed (mergekit will
  # apply the model's deltas twice, effectively weight 0.21 + 0.19).
  - model: allura-org/Mistral-Small-24b-Sertraline-0304
    parameters:
      weight: 0.19
      density: 0.71
  - model: allura-org/Mistral-Small-Sisyphus-24b-2503
    parameters:
      weight: 0.32
      density: 0.76
  - model: ArliAI/Mistral-Small-24B-ArliAI-RPMax-v1.4
    parameters:
      weight: 0.12
      density: 0.65
  - model: trashpanda-org/MS-24B-Instruct-Mullein-v0
    parameters:
      weight: 0.12
      density: 0.62
  - model: lars1234/Mistral-Small-24B-Instruct-2501-writer
    parameters:
      weight: 0.32
      density: 0.98
  - model: TroyDoesAI/BlackSheep-24B
    parameters:
      weight: 0.21
      density: 0.69
  - model: ReadyArt/Forgotten-Safeword-24B-v4.0
    parameters:
      weight: 0.16
      density: 0.64

merge_method: dare_ties
base_model: unsloth/Mistral-Small-24B-Instruct-2501
# Build the output tokenizer as the union of all input tokenizers.
tokenizer_source: union

# Global parameters (fallbacks / method-level knobs for dare_ties).
parameters:
  density: 0.85  # default retention fraction for models without their own
  epsilon: 0.09  # density half-range used when varying density per tensor
  lambda: 1.25   # final scaling factor applied to the summed deltas

dtype: bfloat16