{
    "per_channel": true,
    "reduce_range": true,
    "per_model_config": {
        "model": {
            "op_types": [
                "Conv",
                "Gemm",
                "GlobalAveragePool",
                "Flatten",
                "MaxPool",
                "Relu",
                "Add"
            ],
            "weight_type": "QUInt8"
        }
    }
}
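
These fields resemble the arguments of ONNX Runtime's dynamic quantization API (per_channel, reduce_range, weight_type, op_types_to_quantize). Below is a minimal sketch of how a config like this could be applied; the file names quantize_config.json and model.onnx are assumptions, the field-to-argument mapping is inferred rather than stated in the file, and exact argument names can vary between onnxruntime versions.

# Sketch only: reads the config shown above and feeds it to onnxruntime's
# dynamic quantizer. File names and the mapping of fields to arguments are
# assumptions, not taken from the original file.
import json

from onnxruntime.quantization import QuantType, quantize_dynamic

with open("quantize_config.json") as f:          # assumed file name
    cfg = json.load(f)

model_cfg = cfg["per_model_config"]["model"]

quantize_dynamic(
    model_input="model.onnx",                    # assumed input model path
    model_output="model_quantized.onnx",         # assumed output path
    per_channel=cfg["per_channel"],              # per-output-channel weight scales
    reduce_range=cfg["reduce_range"],            # 7-bit weight range for wider CPU compatibility
    weight_type=QuantType.QUInt8,                # matches "weight_type": "QUInt8"
    op_types_to_quantize=model_cfg["op_types"],  # restrict quantization to the listed op types
)

Per-channel scales and reduce_range trade a small amount of compression headroom for better accuracy and compatibility on CPUs without saturation-safe int8 instructions, which is consistent with the settings in the config.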