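# mergekit DARE-TIES merge config: three Llama-3-8B fine-tunes blended slice by slice
# (4-layer slices across all 32 layers), with Roleplay-Llama-3-8B as the base model.
#
# Hedged usage sketch, assuming mergekit is installed (e.g. `pip install mergekit`);
# the config filename and output directory here are placeholders, not from the source:
#   mergekit-yaml merge-config.yaml ./merged-model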
base_model: ./result/input_models/Roleplay-Llama-3-8B_213413727
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
  normalize: 0.0
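# Per-slice, per-model parameters (descriptive note, assuming standard mergekit
# dare_ties semantics): `density` is the fraction of each model's delta from the base
# that DARE keeps before rescaling, and `weight` scales that delta when it is merged
# into the base. A density of 1.0 means no parameters are dropped for that slice.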
slices:
- sources:
  - layer_range: [0, 4]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.9061440388199886
      weight: 0.7420827290507876
  - layer_range: [0, 4]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 0.8343357824656759
      weight: 0.5634171099678891
  - layer_range: [0, 4]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 1.0
      weight: 0.03808449036687045
- sources:
  - layer_range: [4, 8]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 1.0
      weight: 0.040706182952752565
  - layer_range: [4, 8]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 1.0
      weight: 0.5235663919709214
  - layer_range: [4, 8]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 0.6753137462586175
      weight: 0.1718739352284447
- sources:
  - layer_range: [8, 12]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.8144143226543775
      weight: 0.2916571301845346
  - layer_range: [8, 12]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 0.5944343021653459
      weight: 0.6289130590047136
  - layer_range: [8, 12]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 0.9096807190417433
      weight: 0.18225448981675693
- sources:
  - layer_range: [12, 16]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 1.0
      weight: 0.31346575871103577
  - layer_range: [12, 16]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 1.0
      weight: 0.6710513199806648
  - layer_range: [12, 16]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 1.0
      weight: 0.2620098997126852
- sources:
  - layer_range: [16, 20]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.7957908643933549
      weight: 0.4065602812739591
  - layer_range: [16, 20]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 1.0
      weight: 0.3833004954478314
  - layer_range: [16, 20]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 1.0
      weight: 0.3722661530618318
- sources:
  - layer_range: [20, 24]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.8820161972577153
      weight: 0.31407655218805003
  - layer_range: [20, 24]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 0.871522940513238
      weight: 0.09916802739443117
  - layer_range: [20, 24]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 0.843576104996367
      weight: 0.48592770058071444
- sources:
  - layer_range: [24, 28]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.8818663379010269
      weight: 0.4128563619116445
  - layer_range: [24, 28]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 0.89467562267532
      weight: 0.39209478410830645
  - layer_range: [24, 28]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 1.0
      weight: 0.20302426278165847
- sources:
  - layer_range: [28, 32]
    model: ./result/input_models/Llama-3-Lumimaid-8B-e1_2058152591
    parameters:
      density: 0.8679751557926477
      weight: 0.5226676522508309
  - layer_range: [28, 32]
    model: ./result/input_models/Llama-3-Unholy-8B-e4_1440388923
    parameters:
      density: 0.9145274983719552
      weight: 0.4103390562947599
  - layer_range: [28, 32]
    model: ./result/input_models/Roleplay-Llama-3-8B_213413727
    parameters:
      density: 0.7116071161471552
      weight: 0.5557266216543452