slices:
  - sources:
      - model: Casual-Autopsy/Llama-3-Psychology-LoRA-Stock-8B
        layer_range: [0, 32]
        parameters:
          weight: 0.6
      - model: Casual-Autopsy/Llama-3-MopeyMule-Blackroot-8B
        layer_range: [0, 32]
        parameters:
          weight: 0.25
      - model: Casual-Autopsy/Llama-3-SOVL-MopeyMule-8B
        layer_range: [0, 32]
        parameters:
          weight: 0.15
merge_method: task_arithmetic
base_model: Casual-Autopsy/Llama-3-Psychology-LoRA-Stock-8B
normalize: False
dtype: bfloat16
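
For reference, merge_method: task_arithmetic builds the output as the base model plus a weighted sum of each source's task vector (source weights minus base weights). With normalize: False, the weights 0.6 / 0.25 / 0.15 are applied as-is rather than rescaled to sum to 1. Note that the base model also appears here as the first source; under this formulation its own task vector is zero, so its weight would only matter if normalization were enabled. A minimal PyTorch sketch of the per-tensor math, with hypothetical state-dict variables standing in for the loaded models:

    # Sketch of the task-arithmetic merge rule; `base_sd` and `finetuned_sds`
    # are hypothetical state dicts, not mergekit internals.
    import torch

    def task_arithmetic_merge(base_sd, finetuned_sds, weights, normalize=False):
        """merged = base + sum_i w_i * (finetuned_i - base)

        With normalize=False (as in this config) the weights are used as-is;
        with normalize=True they would be rescaled to sum to 1.
        """
        if normalize:
            total = sum(weights)
            weights = [w / total for w in weights]
        merged = {}
        for name, base_t in base_sd.items():
            delta = sum(w * (sd[name] - base_t)
                        for sd, w in zip(finetuned_sds, weights))
            merged[name] = base_t + delta
        return merged

    # Toy example with random tensors standing in for model parameters.
    base = {"layer.weight": torch.randn(4, 4)}
    models = [{"layer.weight": base["layer.weight"] + 0.1 * torch.randn(4, 4)}
              for _ in range(3)]
    merged = task_arithmetic_merge(base, models, weights=[0.6, 0.25, 0.15])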
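
To reproduce the merge, save the config (e.g. as config.yml) and run it through mergekit, either via its CLI (mergekit-yaml config.yml ./merged-model) or its Python API. A sketch using the Python entry point follows; the class and option names follow mergekit's README at the time of writing and should be checked against the installed version:

    # Sketch: running this config with mergekit's Python API.
    # Assumes mergekit and PyYAML are installed; API may change between versions.
    import yaml
    import torch
    from mergekit.config import MergeConfiguration
    from mergekit.merge import MergeOptions, run_merge

    with open("config.yml", "r", encoding="utf-8") as f:
        config = MergeConfiguration.model_validate(yaml.safe_load(f))

    run_merge(
        config,
        out_path="./merged-model",           # hypothetical output directory
        options=MergeOptions(
            cuda=torch.cuda.is_available(),  # merge on GPU if one is present
            copy_tokenizer=True,             # carry the tokenizer into the output
        ),
    )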