{
    "per_channel": true,
    "reduce_range": true,
    "per_model_config": {
        "model": {
            "op_types": [
                "Unsqueeze",
                "Div",
                "Sub",
                "Transpose",
                "Erf",
                "Gather",
                "Mul",
                "Constant",
                "Shape",
                "Softmax",
                "Sqrt",
                "Add",
                "ReduceMean",
                "Concat",
                "MatMul",
                "Cast",
                "Pow",
                "Reshape",
                "Identity"
            ],
            "weight_type": "QInt8"
        }
    }
}