models: # Top-Loyal-Bruins-Maid-DARE-7B
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    # no parameters necessary for base model
  - model: Weyaxi/Einstein-v6.1-Llama3-8B
    parameters:
      weight: 0.5    # relative contribution of this model's delta to the merge
      density: 0.8   # fraction of delta parameters retained; the rest are dropped (DARE)
merge_method: dare_ties
base_model: cognitivecomputations/dolphin-2.9-llama3-8b
parameters:
  int8_mask: true    # store merge masks as int8 to reduce memory use
dtype: bfloat16      # output tensors are saved in bfloat16
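
To reproduce the merge, a minimal sketch using mergekit's Python API is shown below. It assumes mergekit is installed and the YAML above is saved as config.yml; CONFIG_YML, OUTPUT_PATH, and the MergeOptions values are illustrative and should be adjusted to your environment.

import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "./config.yml"   # the DARE-TIES config shown above (assumed path)
OUTPUT_PATH = "./merged"      # where the merged model will be written (assumed path)

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_ties merge; CUDA is used if a GPU is available.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

The same config can also be run from the command line with mergekit-yaml, e.g. mergekit-yaml config.yml ./merged --cuda.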