File size: 409 Bytes
36c42c6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18

---
# mergekit TIES-merge configuration: combine two fine-tunes of Mistral-7B
# back onto the base model, keeping the top 50% of each fine-tune's
# delta weights (density) and mixing them at 0.3 each (weight).
models:
  - model: mistralai/Mistral-7B-v0.1
    # base model — TIES uses it as the reference; no density/weight here
  - model: OpenPipe/mistral-ft-optimized-1218
    parameters:
      density: 0.5  # fraction of this model's delta parameters to retain
      weight: 0.3   # relative contribution when merging retained deltas
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      density: 0.5
      weight: 0.3
merge_method: ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  normalize: true  # rescale merged weights so contributions sum to 1
dtype: bfloat16