---
base_model: NousResearch/Llama-2-7b-hf
dtype: bfloat16
merge_method: task_arithmetic
slices:
- sources:
  - layer_range: [0, 32]
    model: taide/TAIDE-LX-7B
    parameters:
      weight: 1.0
  - layer_range: [0, 32]
    model: NousResearch/Llama-2-7b-chat-hf
    parameters:
      weight: 1.0
  - layer_range: [0, 32]
    model: NousResearch/Llama-2-7b-hf