merge_method: passthrough
dtype: bfloat16
slices:
  - sources:
      - model: Qwen/Qwen1.5-4B-Chat
        layer_range: [0, 30]
  - sources:
      - model: TinyLlama/TinyLlama-1.1B-intermediate-step-1195k-token-2.5T
        layer_range: [0, 22]
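This configuration uses mergekit's passthrough method to stack layers 0-30 of Qwen/Qwen1.5-4B-Chat and layers 0-22 of TinyLlama-1.1B-intermediate-step-1195k-token-2.5T in bfloat16. Once the merge has been produced, the result can be used like any other Hugging Face checkpoint. Below is a minimal sketch of loading and sampling from it with transformers; the local path `./merged-model` is a placeholder for wherever the merge output was written, not something defined by the config above.

```python
# Minimal sketch: load and sample from the merged checkpoint with transformers.
# "./merged-model" is a hypothetical output directory, not part of the config above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "./merged-model"  # placeholder: path where the merge was saved
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16)

prompt = "Write a short haiku about model merging."
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```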