{
    "per_channel": true,
    "reduce_range": true,
    "per_model_config": {
        "model": {
            "op_types": [
                "Sub",
                "Gather",
                "Mul",
                "Pow",
                "Reshape",
                "Div",
                "Constant",
                "Sqrt",
                "ReduceMean",
                "Concat",
                "Add",
                "Unsqueeze",
                "Softmax",
                "Erf",
                "MatMul",
                "Transpose",
                "Cast",
                "Shape",
                "Slice"
            ],
            "weight_type": "QInt8"
        }
    }
}