{
  "per_channel": true,
  "reduce_range": true,
  "per_model_config": {
    "model": {
      "op_types": [
        "Constant",
        "Reshape",
        "Shape",
        "Softmax",
        "Mul",
        "Cast",
        "Unsqueeze",
        "Erf",
        "Sub",
        "Sqrt",
        "Gather",
        "Transpose",
        "Pow",
        "Div",
        "MatMul",
        "ReduceMean",
        "Add",
        "Slice",
        "Concat"
      ],
      "weight_type": "QInt8"
    }
  }
}
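
The config above describes dynamic int8 quantization settings: per-channel weight quantization, reduced (7-bit) range, QInt8 weight type, and the operator types to quantize. Below is a minimal sketch of how such a config could be consumed, assuming onnxruntime's quantize_dynamic API; the input/output model paths are hypothetical placeholders and parameter names may vary between onnxruntime versions.

# Sketch: read a quantize_config.json like the one above and apply it
# with onnxruntime's dynamic quantization. Paths are hypothetical.
import json

from onnxruntime.quantization import QuantType, quantize_dynamic

with open("quantize_config.json") as f:
    config = json.load(f)

model_config = config["per_model_config"]["model"]

quantize_dynamic(
    model_input="model.onnx",               # hypothetical input path
    model_output="model_quantized.onnx",    # hypothetical output path
    per_channel=config["per_channel"],      # quantize weights per channel
    reduce_range=config["reduce_range"],    # use reduced 7-bit weight range
    weight_type=QuantType[model_config["weight_type"]],  # "QInt8" -> QuantType.QInt8
    op_types_to_quantize=model_config["op_types"],        # operator types to quantize
)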