slices:
- sources:
  - layer_range: [0, 42]
    model: meta-llama/Meta-Llama-3.1-405B-Instruct
- sources:
  - layer_range: [21, 63]
    model: meta-llama/Meta-Llama-3.1-405B-Instruct
- sources:
  - layer_range: [42, 84]
    model: meta-llama/Meta-Llama-3.1-405B-Instruct
- sources:
  - layer_range: [63, 105]
    model: meta-llama/Meta-Llama-3.1-405B-Instruct
- sources:
  - layer_range: [84, 126]
    model: meta-llama/Meta-Llama-3.1-405B-Instruct
merge_method: passthrough
dtype: bfloat16
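
# Note (comments added for clarity, not part of the original config): this is a
# mergekit "passthrough" merge that stacks five overlapping 42-layer slices of
# Meta-Llama-3.1-405B-Instruct (126 decoder layers in the base model), producing
# a deeper 210-layer self-merge; passthrough copies the selected layers verbatim
# instead of averaging weights across models.
# A typical way to run such a config with the mergekit CLI (file and output
# names below are placeholders, not taken from the original file):
#   mergekit-yaml this_config.yml ./merged-model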