# Kudzu-8B / mergekit_config.yml
# Uploaded via huggingface_hub (commit e0ba374)
# mergekit configuration for Kudzu-8B: a task-arithmetic merge of five
# fine-tunes onto the unsloth/llama-3-8b-Instruct base model.
#
# The 32 transformer layers are merged in eight 4-layer slices; every
# slice draws on the same six sources but with its own per-model weights.
# In task arithmetic each `weight` scales that model's task vector (its
# delta from the base), so a negative weight subtracts that model's
# contribution for those layers. The base model appears in each slice
# without an explicit weight.
# NOTE(review): the high-precision weights look optimizer/search-generated —
# confirm before hand-editing individual values.
base_model: unsloth/llama-3-8b-Instruct
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  # mergekit expresses these flags as floats: 1.0 = on, 0.0 = off.
  int8_mask: 1.0
  normalize: 0.0
slices:
# Layers 0-4
- sources:
  - layer_range: [0, 4]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.5422192553245822
  - layer_range: [0, 4]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.4091327905812484
  - layer_range: [0, 4]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: 0.41616131175474513
  - layer_range: [0, 4]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.2850660139219532
  - layer_range: [0, 4]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.40964287418911943
  - layer_range: [0, 4]
    model: unsloth/llama-3-8b-Instruct
# Layers 4-8
- sources:
  - layer_range: [4, 8]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.5566053877399006
  - layer_range: [4, 8]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.21784116178903482
  - layer_range: [4, 8]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: -0.08140279404713095
  - layer_range: [4, 8]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.07605805259348924
  - layer_range: [4, 8]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.1992918731781218
  - layer_range: [4, 8]
    model: unsloth/llama-3-8b-Instruct
# Layers 8-12
- sources:
  - layer_range: [8, 12]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.42637906930687397
  - layer_range: [8, 12]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.30844715223039254
  - layer_range: [8, 12]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: -0.1606133625830507
  - layer_range: [8, 12]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.5539782533846855
  - layer_range: [8, 12]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.1495318571640683
  - layer_range: [8, 12]
    model: unsloth/llama-3-8b-Instruct
# Layers 12-16
- sources:
  - layer_range: [12, 16]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.03452080798253701
  - layer_range: [12, 16]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.044520785633251986
  - layer_range: [12, 16]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: 0.5655679918081231
  - layer_range: [12, 16]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.35243425883642854
  - layer_range: [12, 16]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.26454740785869507
  - layer_range: [12, 16]
    model: unsloth/llama-3-8b-Instruct
# Layers 16-20
- sources:
  - layer_range: [16, 20]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: -0.13407278819856827
  - layer_range: [16, 20]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.03053108740438702
  - layer_range: [16, 20]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: 0.24708199638503964
  - layer_range: [16, 20]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.5044941721525371
  - layer_range: [16, 20]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: -0.31653138355426913
  - layer_range: [16, 20]
    model: unsloth/llama-3-8b-Instruct
# Layers 20-24
- sources:
  - layer_range: [20, 24]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.004416564245359474
  - layer_range: [20, 24]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: -0.11598379217665301
  - layer_range: [20, 24]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: -0.12876299010595685
  - layer_range: [20, 24]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.3784898534536006
  - layer_range: [20, 24]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.061657588569312805
  - layer_range: [20, 24]
    model: unsloth/llama-3-8b-Instruct
# Layers 24-28
- sources:
  - layer_range: [24, 28]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.16516095334523673
  - layer_range: [24, 28]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.26091155041512143
  - layer_range: [24, 28]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: 0.07302391957685345
  - layer_range: [24, 28]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.47418904982691856
  - layer_range: [24, 28]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.01756883139321664
  - layer_range: [24, 28]
    model: unsloth/llama-3-8b-Instruct
# Layers 28-32
- sources:
  - layer_range: [28, 32]
    model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.25017643854726784
  - layer_range: [28, 32]
    model: Edgerunners/meta-llama-3-8b-instruct-hf-ortho-baukit-10fail-1000total
    parameters:
      weight: 0.19459562974498176
  - layer_range: [28, 32]
    model: lodrick-the-lafted/Olethros-8B
    parameters:
      weight: 0.16680974426463924
  - layer_range: [28, 32]
    model: lodrick-the-lafted/Rummage-8B
    parameters:
      weight: 0.10108316256907879
  - layer_range: [28, 32]
    model: lodrick-the-lafted/Limon-8B
    parameters:
      weight: 0.24255074443491947
  - layer_range: [28, 32]
    model: unsloth/llama-3-8b-Instruct