base_model:
  model:
    path: mllm-dev/gpt2_f_experiment_0_drug_data
dtype: float16
merge_method: dare_ties
parameters:
  normalize: 1.0
slices:
- sources:
  - layer_range: [0, 12]
    model:
      model:
        path: mllm-dev/gpt2_f_experiment_0_drug_data
  - layer_range: [0, 12]
    model:
      model:
        path: mllm-dev/gpt2_f_experiment_1_drug_data
    parameters:
      density: 0.9
      weight: 0.2
  - layer_range: [0, 12]
    model:
      model:
        path: mllm-dev/gpt2_f_experiment_2_drug_data
    parameters:
      density: 0.9
      weight: 0.2
  - layer_range: [0, 12]
    model:
      model:
        path: mllm-dev/gpt2_f_experiment_3_drug_data
    parameters:
      density: 0.9
      weight: 0.2
  - layer_range: [0, 12]
    model:
      model:
        path: mllm-dev/gpt2_f_experiment_4_drug_data
    parameters:
      density: 0.9
      weight: 0.2
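
This config describes a dare_ties merge in float16 of five mllm-dev GPT-2 fine-tunes over layers 0-12, with gpt2_f_experiment_0_drug_data as the base model and the other four contributing at density 0.9 and weight 0.2 each. Below is a minimal sketch of running it through mergekit's Python API; the filenames "config.yml" and "./merged" are hypothetical placeholders for this file and the output directory, and the exact MergeOptions fields shown are assumptions based on mergekit's documented example.

```python
# Minimal sketch: run this merge config with mergekit (pip install mergekit).
# "config.yml" and "./merged" are hypothetical paths, not part of the original file.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML above into a validated merge configuration.
with open("config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the dare_ties merge and write the merged model to ./merged.
run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is available
        copy_tokenizer=True,             # copy the base model's tokenizer to the output
    ),
)
```

The same merge can be run from the command line with mergekit's CLI, e.g. `mergekit-yaml config.yml ./merged --cuda`.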