---
# mergekit configuration: DELLA merge of five TareksLab LLaMA-70B finetunes.
# Each model contributes equally (weight 0.20 x 5 = 1.0); per-model DELLA
# pruning parameters (density / epsilon / lambda) are identical across models.
models:
  - model: TareksLab/Wordsmith-V2.0-LLaMa-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.05
      lambda: 1.0
  - model: TareksLab/Anathema-V2-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.05
      lambda: 1.0
  - model: TareksLab/Scrivener-Base-V4-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.05
      lambda: 1.0
  - model: TareksLab/RolePlayer-V4-LLaMa-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.05
      lambda: 1.0
  - model: TareksLab/Erudite-V1-Unleashed-LLaMA-70B
    parameters:
      weight: 0.20
      density: 0.5
      epsilon: 0.05
      lambda: 1.0

# DELLA merge; task-vector deltas are taken relative to base_model.
merge_method: della
base_model: TareksLab/Scrivener-Base-V4-LLaMA-70B

# Global merge parameters.
parameters:
  normalize: false  # keep raw weighted sum; do not renormalize weights
  int8_mask: true   # store pruning masks as int8 to reduce memory use

dtype: bfloat16
chat_template: llama3

# Tokenizer is taken from the base/scrivener model.
tokenizer:
  source: TareksLab/Scrivener-Base-V4-LLaMA-70B