{
    "meta-llama/Meta-Llama-3.1-8B": [
        {
            "batch_size": 1,
            "sequence_length": 4096,
            "num_cores": 2,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 4,
            "sequence_length": 4096,
            "num_cores": 2,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 8,
            "sequence_length": 4096,
            "num_cores": 2,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 4,
            "sequence_length": 4096,
            "num_cores": 8,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 8,
            "sequence_length": 4096,
            "num_cores": 8,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 16,
            "sequence_length": 4096,
            "num_cores": 8,
            "auto_cast_type": "bf16"
        },
        {
            "batch_size": 32,
            "sequence_length": 4096,
            "num_cores": 8,
            "auto_cast_type": "bf16"
        }
    ]
}