mergekit-ties-folgysk / mergekit_config.yml
merge_method: ties
architectures: ["transformer"]
base_model: meta-llama/Llama-3.2-1B
models:
- model: Alelcv27/llama3.2-1b-math-code
- model: huyhoangt2201/llama-3.2-1b-sql_finetuned_billingual_3.0_merged
- model: autoprogrammer/Llama-3.2-1B-Instruct-MGSM8K-sft1
- model: meta-llama/Llama-3.2-1B-Instruct
- model: autoprogrammer/Llama-3.2-1B-Instruct-medmcqa-zh-linear
- model: meta-llama/Llama-3.2-1B
- model: unsloth/Llama-3.2-1B-Instruct-bnb-4bit
- model: MLking2/llama-3.2-1b-medical
parameters:
  density: 0.5
  weight: 1.0
  int8_mask: true
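
For reference, the TIES merge described by this config can be run with mergekit's CLI (mergekit-yaml mergekit_config.yml ./merged) or programmatically. The snippet below is a minimal sketch of the programmatic route, assuming the mergekit package is installed, its documented Python API (MergeConfiguration, run_merge, MergeOptions), and a local copy of this file named mergekit_config.yml; the output path ./merged and the option values are illustrative, not part of the original upload.

import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate this repository's config file.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the TIES merge of the Llama-3.2-1B variants listed above and
# write the merged model to ./merged (illustrative output path).
run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
    ),
)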