---
# mergekit configuration: model_stock merge of Llama-3-8B-Instruct-MopeyMule
# variants. Each entry uses mergekit's "base+adapter" syntax, which applies
# the named LoRA adapter to the base checkpoint before merging.
models:
  - model: failspy/Llama-3-8B-Instruct-MopeyMule+Blackroot/Llama3-RP-Lora
  - model: failspy/Llama-3-8B-Instruct-MopeyMule+zementalist/llama-3-8B-chat-psychotherapist
  - model: failspy/Llama-3-8B-Instruct-MopeyMule+ResplendentAI/Llama3_RP_ORPO_LoRA
  - model: failspy/Llama-3-8B-Instruct-MopeyMule+kloodia/lora-8b-math

# model_stock computes the merged weights relative to base_model; here the
# base itself is a LoRA-augmented variant of the same MopeyMule checkpoint.
merge_method: model_stock
base_model: failspy/Llama-3-8B-Instruct-MopeyMule+Blackroot/Llama-3-8B-Abomination-LORA

# Merge options: no weight normalization, int8 masking enabled,
# output tensors in bfloat16.
normalize: false
int8_mask: true
dtype: bfloat16