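# mergekit passthrough ("frankenmerge") configuration: the slices below are
# concatenated in order, stacking all 32 layers of ehristoforu/0001lp and then
# the top layers of three other Llama-3-8B fine-tunes, for 50 layers in total.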
slices:
  - sources:
    - model: ehristoforu/0001lp
      layer_range: [0, 32]
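  # Upper-layer slices from three additional fine-tunes, appended after the
  # full base stack: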
  - sources:
    - model: NeuralNovel/Llama-3-NeuralPaca-8b
      layer_range: [24, 32]
  - sources:
    - model: cognitivecomputations/dolphin-2.9-llama3-8b
      layer_range: [26, 32]
  - sources:
    - model: vicgalle/Configurable-Llama-3-8B-v0.2
      layer_range: [28, 32]
merge_method: passthrough
dtype: bfloat16
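
# A minimal sketch of how this config is typically applied with the mergekit
# CLI (assumes mergekit is installed and this file is saved as config.yml;
# the output path is illustrative):
#   mergekit-yaml config.yml ./merged-model --cuda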