DM-MERGE4c / mergekit_config.yml
models:
  - model: TheDrummer/Fallen-Llama-3.3-R1-70B-v1
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
      lambda: 1.1
  - model: ReadyArt/Forgotten-Safeword-70B-3.6
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
      lambda: 1.1
  - model: allura-org/Bigger-Body-70b
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
      lambda: 1.1
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.1
      lambda: 1
base_model: SicariusSicariiStuff/Negative_LLAMA_70B
merge_method: della_linear
parameters:
  normalize: false
  int8_mask: true
chat_template: llama3
tokenizer:
  source: base
dtype: bfloat16
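
For reference, a config like this is consumed by mergekit's mergekit-yaml command. A minimal usage sketch with mergekit installed; the output directory name is illustrative (not part of this repo), and --cuda assumes a GPU is available:

# run the della_linear merge described by this config
mergekit-yaml mergekit_config.yml ./DM-MERGE4c --cuda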