---
base_model:
- lilmeaty/2
- ibm-granite/granite-3b-code-base-2k
- Qwen/Qwen2.5-3B
- ICEPVP8977/Uncensored_llama_3.2_3b_safetensors
- Qwen/Qwen2.5-3B-Instruct
- PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B
- ministral/Ministral-3b-instruct
- lilmeaty/4
- Bllossom/llama-3.2-Korean-Bllossom-3B
- stabilityai/stable-code-3b
- lilmeaty/5
- chuanli11/Llama-3.2-3B-Instruct-uncensored
library_name: transformers
tags:
- mergekit
- merge

---
# merge

This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).

## Merge Details
### Merge Method

This model was merged using the passthrough merge method.

### Models Merged

The following models were included in the merge:
* [lilmeaty/2](https://huggingface.co/lilmeaty/2)
* [ibm-granite/granite-3b-code-base-2k](https://huggingface.co/ibm-granite/granite-3b-code-base-2k)
* [Qwen/Qwen2.5-3B](https://huggingface.co/Qwen/Qwen2.5-3B)
* [ICEPVP8977/Uncensored_llama_3.2_3b_safetensors](https://huggingface.co/ICEPVP8977/Uncensored_llama_3.2_3b_safetensors)
* [Qwen/Qwen2.5-3B-Instruct](https://huggingface.co/Qwen/Qwen2.5-3B-Instruct)
* [PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B](https://huggingface.co/PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B)
* [ministral/Ministral-3b-instruct](https://huggingface.co/ministral/Ministral-3b-instruct)
* [lilmeaty/4](https://huggingface.co/lilmeaty/4)
* [Bllossom/llama-3.2-Korean-Bllossom-3B](https://huggingface.co/Bllossom/llama-3.2-Korean-Bllossom-3B)
* [stabilityai/stable-code-3b](https://huggingface.co/stabilityai/stable-code-3b)
* [lilmeaty/5](https://huggingface.co/lilmeaty/5)
* [chuanli11/Llama-3.2-3B-Instruct-uncensored](https://huggingface.co/chuanli11/Llama-3.2-3B-Instruct-uncensored)

### Configuration

The following YAML configuration was used to produce this model:

```yaml
slices:
  - sources:
      - layer_range: [0, 3]
        model: lilmeaty/5
        parameters:
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
  - sources:
      - layer_range: [0, 3]
        model: lilmeaty/5
        parameters:
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
  - sources:
      - layer_range: [0, 3]
        model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors
        parameters:
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
  - sources:
      - layer_range: [0, 3]
        model: lilmeaty/2
        parameters:
          normalize: true
          int8_mask: true
          density: 0.5
          weight: 0.1
          random_seed: 0
          temperature: 0.5
          top_p: 0.65
          inference: true
          max_tokens: 999999999
          stream: true
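          # Note: each `parameters:` block in this config repeats the same keys twice;
          # duplicate keys are redundant in YAML and most parsers simply keep the last value.
          # Keys such as random_seed, temperature, top_p, inference, max_tokens and stream
          # look like inference/sampling settings rather than documented mergekit merge
          # parameters, and weight/density are generally not used by the passthrough method,
          # which just stacks the listed layer_range slices in order, so these entries are
          # most likely ignored by the merge itself.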
normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + 
random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: 
true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: 
PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 
0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + 
max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + 
int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 
0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: 
+ - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + 
int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + 
max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: 
true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + 
temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: 
chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + 
parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + 
temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + 
max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 
0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - 
sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: 
Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 
0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 
999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + 
weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 
999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - 
layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + 
int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/5 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + 
inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true 
+ density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: stabilityai/stable-code-3b + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ibm-granite/granite-3b-code-base-2k + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 
0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ministral/Ministral-3b-instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: ICEPVP8977/Uncensored_llama_3.2_3b_safetensors + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/2 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: lilmeaty/4 + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: chuanli11/Llama-3.2-3B-Instruct-uncensored + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Bllossom/llama-3.2-Korean-Bllossom-3B + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + - sources: + - layer_range: [0, 3] + model: Qwen/Qwen2.5-3B-Instruct + parameters: + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + normalize: true + int8_mask: true + density: 0.5 + weight: 0.1 + random_seed: 0 + temperature: 0.5 + top_p: 0.65 + inference: true + max_tokens: 999999999 + stream: true + 
+  - sources:
+      - layer_range: [0, 3]
+        model: Qwen/Qwen2.5-3B
+        parameters:
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+  - sources:
+      - layer_range: [0, 3]
+        model: stabilityai/stable-code-3b
+        parameters:
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+  - sources:
+      - layer_range: [0, 3]
+        model: ibm-granite/granite-3b-code-base-2k
+        parameters:
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+  - sources:
+      - layer_range: [0, 3]
+        model: ministral/Ministral-3b-instruct
+        parameters:
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+          normalize: true
+          int8_mask: true
+          density: 0.5
+          weight: 0.1
+          random_seed: 0
+          temperature: 0.5
+          top_p: 0.65
+          inference: true
+          max_tokens: 999999999
+          stream: true
+parameters:
+  normalize: true
+  int8_mask: true
+  density: 0.5
+  weight: 0.1
+  random_seed: 0
+  temperature: 0.5
+  top_p: 0.65
+  inference: true
+  max_tokens: 999999999
+  stream: true
+normalize: true
+int8_mask: true
+density: 0.5
+weight: 0.1
+random_seed: 0
+temperature: 0.5
+top_p: 0.65
+inference: true
+max_tokens: 999999999
+stream: true
+merge_method: passthrough
+
+```
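+
+### Usage
+
+The merged checkpoint should load like any other `transformers` causal language model. The snippet below is a minimal sketch, not an official recipe: the repository id is a placeholder to be replaced with the id this card is published under, and `device_map="auto"` assumes `accelerate` is installed. To reproduce the merge itself, the YAML above can be passed to mergekit's `mergekit-yaml` entry point (e.g. `mergekit-yaml config.yml ./output-model-directory`); consult the mergekit documentation for the exact flags.
+
+```python
+# Minimal usage sketch for the merged model.
+# "your-username/merged-model" is a placeholder repo id, not the real one.
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+repo_id = "your-username/merged-model"  # placeholder: replace with this repository's actual id
+
+tokenizer = AutoTokenizer.from_pretrained(repo_id)
+model = AutoModelForCausalLM.from_pretrained(
+    repo_id,
+    torch_dtype="auto",   # keep the dtype the checkpoint was saved in
+    device_map="auto",    # requires accelerate; remove for plain CPU loading
+)
+
+prompt = "Explain what a passthrough model merge is in one sentence."
+inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+outputs = model.generate(**inputs, max_new_tokens=128)
+print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+```
+
+Note on the configuration: the `passthrough` method simply stacks the selected layer ranges without interpolating weights, so interpolation-style settings such as `weight` and `density`, and sampling-style settings such as `temperature`, `top_p`, `max_tokens`, and `stream`, are not merge-time parameters for this method and are expected to have no effect on the produced weights.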