File size: 305 Bytes
7e1c3fe
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
---
# mergekit configuration: TIES merge of two Qwen2-7B-Instruct variants
# onto the Qwen/Qwen2-7B base model.
models:
  # Korean-tuned quantized variant (local/relative model path).
  - model: Qwen2-7B-it-ko-quant
    parameters:
      density: 1
      weight: 1
  # Official instruct model from the Hugging Face hub.
  - model: Qwen/Qwen2-7B-Instruct
    parameters:
      density: 1
      weight: 1
merge_method: ties
base_model: Qwen/Qwen2-7B
parameters:
  int8_mask: true
  # Fix: was misspelled "nomalize"; mergekit silently ignores unknown keys,
  # so weight normalization was never actually enabled.
  normalize: true
  # Global defaults; redundant with the per-model values above but harmless.
  weight: 1
  density: 1
dtype: bfloat16