---
# mergekit evolutionary-merge configuration (task arithmetic).
# Merges three Mistral-7B-family checkpoints in four 8-layer slices;
# per-slice weights were produced by an evolutionary search, so negative
# weights (subtracting a model's task vector) are intentional.
base_model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
  # Layers 0-7
  - sources:
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.6116678110210994
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: -0.24959657782037278
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/Mistral-7B-v0.1-flashback-v2-instruct_3664132380
        parameters:
          weight: 0.540324494683666
  # Layers 8-15
  - sources:
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.3293682339424332
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: -0.023694567670847724
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/Mistral-7B-v0.1-flashback-v2-instruct_3664132380
        parameters:
          weight: -0.1930115458123503
  # Layers 16-23
  - sources:
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.27340593188424295
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.08277665681111157
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/Mistral-7B-v0.1-flashback-v2-instruct_3664132380
        parameters:
          weight: -0.04650853736971121
  # Layers 24-31
  - sources:
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.22175238436196998
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.3692597806977656
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/Mistral-7B-v0.1-flashback-v2-instruct_3664132380
        parameters:
          weight: 0.5617035813353589