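# mergekit configuration: DARE-TIES merge of three Gemma-7B fine-tunes
# (gemma-7b-it, zephyr-7b-gemma-v0.1, Gemmalpaca-7B) onto the google/gemma-7b base.
# For each model, `density` is the fraction of that model's delta parameters
# retained before merging and `weight` is its relative contribution.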
models:
  - model: google/gemma-7b-it
    parameters:
      density: 0.5
      weight: 0.3
  - model: HuggingFaceH4/zephyr-7b-gemma-v0.1
    parameters:
      density: 0.5
      weight: 0.3 # constant weight; a list here would define a layer-wise gradient
  - model: mlabonne/Gemmalpaca-7B
    parameters:
      density: 0.5
      weight: 0.3 # constant weight; a list here would define a layer-wise gradient
merge_method: dare_ties
base_model: google/gemma-7b
parameters:
  normalize: true  # rescale the per-model weights so they sum to 1
  int8_mask: true  # store intermediate masks in int8 to reduce memory use
dtype: float16     # dtype used for the merged output tensors
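
# A minimal usage sketch, assuming this file is saved as config.yaml and the
# mergekit package is installed; the output directory name is illustrative:
#   pip install mergekit
#   mergekit-yaml config.yaml ./merged-gemma-7b --cuda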