L3-UI-v1-8B / mergekit_config.yml
slices:
  - sources:
      - model: Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
        layer_range: [0, 32]
        parameters:
          weight: 1.0
      - model: princeton-nlp/Llama-3-Instruct-8B-SimPO
        layer_range: [0, 32]
        parameters:
          weight: 1.0
merge_method: task_arithmetic
base_model: Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
normalize: False
dtype: bfloat16
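
For context, task_arithmetic follows the task-vector recipe: each source model's delta from base_model is scaled by its weight and added back to the base, with normalize controlling whether the weights are rescaled. A minimal sketch of that arithmetic on toy state dicts (illustrative only, not mergekit's actual code; the normalize handling here is an assumption):

import torch

def task_arithmetic(base, sources, normalize=False):
    """merged = base + sum_i w_i * (model_i - base), applied per tensor."""
    total = sum(w for _, w in sources) if normalize else 1.0
    merged = {}
    for name, base_param in base.items():
        delta = torch.zeros_like(base_param)
        for model, weight in sources:
            delta += (weight / total) * (model[name] - base_param)
        merged[name] = base_param + delta
    return merged

# Toy tensors standing in for the two 8B checkpoints in the config above.
umbral = {"w": torch.tensor([1.0, 2.0])}   # also the base_model, so its delta is zero
simpo = {"w": torch.tensor([3.0, 0.0])}
print(task_arithmetic(umbral, [(umbral, 1.0), (simpo, 1.0)]))  # {'w': tensor([3., 0.])}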