{
  "HuggingFaceH4/zephyr-7b-beta": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 8,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 8,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 8,
      "sequence_length": 4096,
      "num_cores": 8,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 16,
      "sequence_length": 4096,
      "num_cores": 8,
      "auto_cast_type": "bf16"
    }
  ],
  "NousResearch/Genstruct-7B": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "BioMistral/BioMistral-7B": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "NousResearch/Hermes-2-Pro-Mistral-7B": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "NousResearch/Nous-Hermes-2-Mistral-7B-DPO": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "ibm/merlinite-7b": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "mlabonne/AlphaMonarch-7B": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "fp16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "fp16"
    }
  ],
  "teknium/OpenHermes-2.5-Mistral-7B": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "TencentARC/Mistral_Pro_8B_v0.1": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ],
  "openchat/openchat-3.5-0106": [
    {
      "batch_size": 1,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    },
    {
      "batch_size": 4,
      "sequence_length": 4096,
      "num_cores": 2,
      "auto_cast_type": "bf16"
    }
  ]
}