models:
  - model: HuggingFaceH4/zephyr-7b-beta
    # no parameters necessary for the base model
  - model: meta-llama/Llama-2-7b-chat-hf
    parameters:
      density: 0.1
      weight: # per-tensor weight: 0.1 for MLP tensors, 0 for everything else
        - filter: mlp
          value: 0.1
        - value: 0
merge_method: ties
base_model: HuggingFaceH4/zephyr-7b-beta
parameters:
  normalize: true
  int8_mask: true
dtype: float16
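With the configuration saved to a file (here assumed to be `config.yaml`), the merge can be run with the `mergekit-yaml` CLI (for example `mergekit-yaml config.yaml ./merged-model --copy-tokenizer`) or from Python. The sketch below uses mergekit's Python entry points (`MergeConfiguration`, `MergeOptions`, `run_merge`) as exposed in recent releases of the library; the exact option names can differ between versions, and the config filename and output directory are placeholders.

```python
# Minimal sketch of running the TIES merge above with mergekit's Python API.
# Assumptions: the YAML config is saved as "config.yaml", mergekit and torch
# are installed, and the API matches recent mergekit releases
# (MergeConfiguration, MergeOptions, run_merge); option names may vary.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "config.yaml"      # the merge configuration shown above
OUTPUT_PATH = "./merged-model"  # directory to write the merged weights to

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the resulting model to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use the GPU when one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,             # experimental low-memory loader
        low_cpu_memory=False,            # trade RAM usage for GPU memory if enabled
    ),
)
```

The merged weights land in `./merged-model` as a standard Hugging Face checkpoint, so they can be loaded afterwards with `transformers.AutoModelForCausalLM.from_pretrained("./merged-model")`.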