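# mergekit configuration for OptimalMerge-14B: a DARE-TIES merge of five
# Qwen2.5-14B-family models onto the suayptalha/Lamarckvergence-14B base.
# DARE-TIES takes each model's delta from the base, randomly drops a
# (1 - density) fraction of that delta, rescales the surviving entries, and
# combines the deltas with TIES-style sign consensus before adding them back
# to the base weights.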
name: OptimalMerge-14B
merge_method: dare_ties
base_model: suayptalha/Lamarckvergence-14B
tokenizer_source: base
dtype: bfloat16
out_dtype: bfloat16

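# Global merge options: `normalize` rescales the per-model weights so they
# sum to 1; `int8_mask` stores intermediate masks as int8 to save memory.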
parameters:
  normalize: true
  int8_mask: true

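# Source models: `weight` is each model's relative contribution to the merge;
# `density` is the fraction of its delta parameters kept after DARE dropping.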
models:
  - model: suayptalha/Lamarckvergence-14B
    parameters:
      weight: 0.35
      density: 0.75
  - model: sthenno/tempesthenno-ppo-ckpt40
    parameters:
      weight: 0.25
      density: 0.7
  - model: tanliboy/lambda-qwen2.5-14b-dpo-test
    parameters:
      weight: 0.2
      density: 0.65
  - model: djuna/Q2.5-Veltha-14B
    parameters:
      weight: 0.1
      density: 0.6
  - model: Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4
    parameters:
      weight: 0.1
      density: 0.6

adaptive_merge_parameters:
  task_weights:
    IFEval: 1.8
    BBH: 1.5
    MATH: 2.0
    GPQA: 1.4
    MUSR: 1.3
    MMLU-PRO: 1.5
  smoothing_factor: 0.1

gradient_clipping:
  suayptalha/Lamarckvergence-14B: 0.85
  sthenno/tempesthenno-ppo-ckpt40: 0.88
  tanliboy/lambda-qwen2.5-14b-dpo-test: 0.87
  djuna/Q2.5-Veltha-14B: 0.89
  Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4: 0.86
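
# Usage sketch: with mergekit installed, this config can be passed to the
# mergekit-yaml CLI. The file name and output directory below are
# illustrative, not fixed by this config:
#   mergekit-yaml optimalmerge-14b.yml ./OptimalMerge-14B --cuda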