```yaml
models:
  - model: mikun/qwen2.5-9k-lora-model
    parameters:
      weight: 1
      density: 1
  - model: mikun/qwen2.5-13k-lora-model
    parameters:
      weight: 1
      density: 1
  - model: Qwen/Qwen2.5-7B-Instruct
    parameters:
      weight: 1
      density: 1
merge_method: ties
base_model: Qwen/Qwen2.5-7B
parameters:
  weight: 1
  density: 1
  normalize: true
  int8_mask: true
tokenizer_source: mikun/qwen2.5-13k-lora-model
dtype: bfloat16
```
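
This config merges three Qwen2.5-7B fine-tunes into the `Qwen/Qwen2.5-7B` base with the TIES method, keeping every task-vector entry (`density: 1`) and weighting all models equally (`weight: 1`), then taking the tokenizer from `mikun/qwen2.5-13k-lora-model` and saving the result in `bfloat16`. As a rough, per-tensor sketch of what TIES does with these parameters (trim each task vector by magnitude, elect a per-entry sign, and merge only the entries that agree), the snippet below is an illustrative assumption rather than mergekit's actual implementation; the function `ties_merge` and its signature are invented for this example.

```python
import torch

def ties_merge(base, finetuned, weights, density=1.0):
    """Illustrative TIES merge for one parameter tensor (not mergekit's code).

    base      -- the shared base parameter (here from Qwen/Qwen2.5-7B)
    finetuned -- the same parameter from each model listed under `models:`
    weights   -- the per-model `weight` values (all 1 in this config)
    density   -- fraction of task-vector entries kept (`density: 1` keeps all)
    """
    # 1. Task vectors: difference between each fine-tune and the base.
    deltas = [ft - base for ft in finetuned]

    # 2. Trim: keep only the top-`density` fraction of entries by magnitude.
    trimmed = []
    for d in deltas:
        k = max(1, int(density * d.numel()))
        cutoff = d.abs().flatten().kthvalue(d.numel() - k + 1).values
        trimmed.append(torch.where(d.abs() >= cutoff, d, torch.zeros_like(d)))

    # 3. Elect a per-entry sign from the weighted sum of trimmed task vectors.
    elected = torch.sign(sum(w * t for w, t in zip(weights, trimmed)))

    # 4. Disjoint merge: combine only the entries whose sign matches the vote.
    agree = [(torch.sign(t) == elected).float() for t in trimmed]
    numer = sum(w * t * a for w, t, a in zip(weights, trimmed, agree))
    denom = sum(w * a for w, a in zip(weights, agree)).clamp(min=1e-8)

    # 5. Add the merged task vector back onto the base parameter.
    return base + numer / denom

# Toy check with random tensors standing in for real model weights.
base = torch.zeros(4, 4)
experts = [torch.randn(4, 4) for _ in range(3)]
merged = ties_merge(base, experts, weights=[1.0, 1.0, 1.0], density=1.0)
```

With `density: 1` nothing is trimmed, so the merge reduces to a sign-filtered weighted average of the three task vectors; mergekit's `normalize` and `int8_mask` options govern how that averaging and masking are handled internally and are not modeled here.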