# File size: 303 Bytes
# source revision: 3156d82
---
# mergekit configuration: TIES merge of two Llama-3.1-8B variants.
# Run with e.g.: mergekit-yaml this_file.yaml ./output-model
models:
  # Fine-tuned checkpoint to fold into the base (equal weight with the
  # instruct model below; `normalize: true` rescales the weights anyway).
  - model: johnpaulbin/llama3.1-8b-e2-epoch3-merged-fp16
    parameters:
      weight: 1
  # Official instruct tune, merged with the same weight.
  - model: meta-llama/Meta-Llama-3.1-8B-Instruct
    parameters:
      weight: 1
# TIES resolves sign conflicts between task vectors before summing
# (see mergekit docs / TIES-Merging paper for details).
merge_method: ties
# Task vectors are computed as deltas from this base model.
base_model: meta-llama/Meta-Llama-3.1-8B
parameters:
  normalize: true   # rescale merge weights so they sum to 1
  int8_mask: true   # NOTE(review): int8 consensus mask — saves memory per mergekit docs
dtype: bfloat16     # dtype used for the merge computation and output weights