bcse committed on
Commit
ea6eba2
1 Parent(s): dce86fa

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. mergekit_config.yml +41 -0
mergekit_config.yml ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ merge_method: linear
2
+ parameters:
3
+ weight: 1.0
4
+ slices:
5
+ - sources:
6
+ - model: ShinojiResearch/Senku-70B-Full
7
+ layer_range: [0, 1]
8
+ - model: Sao10K/Euryale-1.3-L2-70B
9
+ layer_range: [0, 1]
10
+ parameters:
11
+ weight: 0
12
+ - sources:
13
+ - model: ShinojiResearch/Senku-70B-Full
14
+ layer_range: [1, 20]
15
+ - sources:
16
+ - model: Sao10K/Euryale-1.3-L2-70B
17
+ layer_range: [10, 30]
18
+ - sources:
19
+ - model: ShinojiResearch/Senku-70B-Full
20
+ layer_range: [20, 40]
21
+ - sources:
22
+ - model: Sao10K/Euryale-1.3-L2-70B
23
+ layer_range: [30, 50]
24
+ - sources:
25
+ - model: ShinojiResearch/Senku-70B-Full
26
+ layer_range: [40, 60]
27
+ - sources:
28
+ - model: Sao10K/Euryale-1.3-L2-70B
29
+ layer_range: [50, 70]
30
+ - sources:
31
+ - model: ShinojiResearch/Senku-70B-Full
32
+ layer_range: [60, 79]
33
+ - sources:
34
+ - model: ShinojiResearch/Senku-70B-Full
35
+ layer_range: [79, 80]
36
+ - model: Sao10K/Euryale-1.3-L2-70B
37
+ layer_range: [79, 80]
38
+ parameters:
39
+ weight: 0
40
+ dtype: float16
41
+ tokenizer_source: model:ShinojiResearch/Senku-70B-Full