# Llama-3-Yollow-SCE / mergekit_config.yml
# Uploaded via huggingface_hub (commit 74fbbab)
---
# mergekit configuration: SCE merge of Llama-3 8B checkpoints.
# The base (pivot) model is combined with three target models; each
# target carries its own select_topk value for the SCE method.
merge_method: sce
base_model: /kaggle/input/meta-llama-3-8b/transformers/hf/1
dtype: bfloat16

models:
  # Pivot model (same checkpoint as base_model)
  - model: /kaggle/input/meta-llama-3-8b/transformers/hf/1
  # Target models
  - model: /kaggle/input/meta-llama-3-8b-instruct/transformers/hf/1
    parameters:
      select_topk: 0.8
  - model: /kaggle/input/llama-3-youko-8b/transformers/hf/1
    parameters:
      select_topk: 0.65
  - model: /kaggle/input/llama-3-swallow-8b-v0.1/transformers/hf/1
    parameters:
      select_topk: 0.65