KanaWrite / model / model.xml
<?xml version="1.0" ?>
<net name="torch-jit-export" version="11">
<layers>
<layer id="20" name="actual_input" type="Parameter" version="opset1">
<data shape="1,1,96,2000" element_type="f32"/>
<rt_info>
<attribute name="old_api_map_element_type" version="0" value="f16"/>
</rt_info>
<output>
<port id="0" precision="FP32" names="actual_input">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
<rt_info>
<attribute name="layout" version="0" layout="[N,C,H,W]"/>
</rt_info>
</port>
</output>
</layer>
<layer id="0" name="8108142758" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="1" name="8118152785" type="Const" version="opset1">
<data offset="4" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="2" name="8128162764" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="3" name="8138172737" type="Const" version="opset1">
<data offset="4" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="4" name="6176212809" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="5" name="6186222566" type="Const" version="opset1">
<data offset="8" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="6" name="6196232551" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="7" name="6206242830" type="Const" version="opset1">
<data offset="8" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="8" name="8008042554" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="9" name="8018052719" type="Const" version="opset1">
<data offset="12" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="10" name="8028062581" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="11" name="8038072803" type="Const" version="opset1">
<data offset="12" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="12" name="3573612596" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="13" name="3583622584" type="Const" version="opset1">
<data offset="16" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="14" name="3593632797" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="15" name="3603642587" type="Const" version="opset1">
<data offset="16" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="16" name="3974012656" type="Const" version="opset1">
<data offset="20" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="17" name="3984022836" type="Const" version="opset1">
<data offset="24" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="18" name="3994032713" type="Const" version="opset1">
<data offset="20" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="19" name="4004042572" type="Const" version="opset1">
<data offset="24" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="21" name="Constant_24761142620" type="Const" version="opset1">
<data offset="28" size="2" shape="1,1,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="22" name="Constant_24761141411/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="23" name="Subtract_351" type="Subtract" version="opset1">
<data auto_broadcast="numpy"/>
<rt_info>
<attribute name="preprocessing" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="24" name="Multiply_2191/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="25" name="Multiply_2191/fq_weights_1/scale17772680" type="Const" version="opset1">
<data offset="30" size="256" shape="64,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="26" name="Multiply_23251171412/restored_convert/quantized17692626" type="Const" version="opset1">
<data offset="286" size="576" shape="64,1,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="27" name="Multiply_23251171412/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="28" name="Multiply_2191/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="29" name="Multiply_2191" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<rt_info>
<attribute name="preprocessing" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>1</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1">
<dim>64</dim>
<dim>1</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="30" name="Constant_21961202707" type="Const" version="opset1">
<data offset="862" size="128" shape="1,64,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="31" name="Constant_21961201413/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="32" name="95" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="95">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="33" name="96" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="96">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="34" name="Multiply_2201/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="35" name="Multiply_2201/fq_weights_1/scale18972593" type="Const" version="opset1">
<data offset="990" size="256" shape="64,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="36" name="Multiply_23311241414/restored_convert/quantized18892806" type="Const" version="opset1">
<data offset="1246" size="36864" shape="64,64,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="37" name="Multiply_23311241414/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="38" name="Multiply_2201/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="39" name="Multiply_2201" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="40" name="Constant_22061272791" type="Const" version="opset1">
<data offset="38110" size="128" shape="1,64,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="41" name="Constant_22061271415/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="42" name="98" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="98">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="43" name="99" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="99">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="44" name="100/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</output>
</layer>
<layer id="45" name="100" type="MaxPool" version="opset8">
<data strides="2,2" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="floor" auto_pad="explicit" dilations="1,1" axis="0" index_element_type="i64"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>96</dim>
<dim>2000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="100">
<dim>1</dim>
<dim>64</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>48</dim>
<dim>1000</dim>
<rt_info/>
</port>
</output>
</layer>
<layer id="46" name="Multiply_2211/fq_weights_1/scale16272725" type="Const" version="opset1">
<data offset="38238" size="512" shape="128,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="47" name="Multiply_23371321416/restored_convert/quantized16192728" type="Const" version="opset1">
<data offset="38750" size="73728" shape="128,64,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="48" name="Multiply_23371321416/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="49" name="Multiply_2211/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="50" name="Multiply_2211" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>64</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1">
<dim>128</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="51" name="Constant_22161352632" type="Const" version="opset1">
<data offset="112478" size="256" shape="1,128,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="52" name="Constant_22161351417/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="53" name="102" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="102">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="54" name="103" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="103">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="55" name="Multiply_2221/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="56" name="Multiply_2221/fq_weights_1/scale19272776" type="Const" version="opset1">
<data offset="112734" size="512" shape="128,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="57" name="Multiply_23431391418/restored_convert/quantized19192788" type="Const" version="opset1">
<data offset="113246" size="147456" shape="128,128,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="58" name="Multiply_23431391418/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="59" name="Multiply_2221/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="60" name="Multiply_2221" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1">
<dim>128</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="61" name="Constant_22261422668" type="Const" version="opset1">
<data offset="260702" size="256" shape="1,128,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="62" name="Constant_22261421419/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="63" name="105" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>128</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="105">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="64" name="106" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="106">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="65" name="107/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</output>
</layer>
<layer id="66" name="107" type="MaxPool" version="opset8">
<data strides="2,2" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="floor" auto_pad="explicit" dilations="1,1" axis="0" index_element_type="i64"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>48</dim>
<dim>1000</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="107">
<dim>1</dim>
<dim>128</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>128</dim>
<dim>24</dim>
<dim>500</dim>
<rt_info/>
</port>
</output>
</layer>
<layer id="68" name="8208242644" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="69" name="8218252752" type="Const" version="opset1">
<data offset="260958" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="70" name="8228262710" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="71" name="8238272578" type="Const" version="opset1">
<data offset="260958" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="72" name="4774812689" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="73" name="4784822794" type="Const" version="opset1">
<data offset="260962" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="74" name="4794832560" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="75" name="4804842698" type="Const" version="opset1">
<data offset="260962" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="76" name="5175212695" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="77" name="5185222590" type="Const" version="opset1">
<data offset="260966" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="78" name="5195232575" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="79" name="5205242848" type="Const" version="opset1">
<data offset="260966" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="80" name="Multiply_2231/fq_weights_1/scale19872623" type="Const" version="opset1">
<data offset="260970" size="1024" shape="256,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="81" name="Multiply_23491471420/restored_convert/quantized19792671" type="Const" version="opset1">
<data offset="261994" size="294912" shape="256,128,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="82" name="Multiply_23491471420/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="83" name="Multiply_2231/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="84" name="Multiply_2231" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>128</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>128</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="85" name="Constant_22361502662" type="Const" version="opset1">
<data offset="556906" size="512" shape="1,256,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="86" name="Constant_22361501421/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="87" name="109" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="109">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="88" name="110" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="110">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="89" name="Multiply_2241/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="90" name="Multiply_2241/fq_weights_1/scale19572839" type="Const" version="opset1">
<data offset="557418" size="1024" shape="256,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="91" name="Multiply_23551541422/restored_convert/quantized19492659" type="Const" version="opset1">
<data offset="558442" size="589824" shape="256,256,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="92" name="Multiply_23551541422/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="93" name="Multiply_2241/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="94" name="Multiply_2241" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="95" name="Constant_22461572629" type="Const" version="opset1">
<data offset="1148266" size="512" shape="1,256,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="96" name="Constant_22461571423/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="97" name="112" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="112">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="98" name="113" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="113">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="99" name="Multiply_2251/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="100" name="Multiply_2251/fq_weights_1/scale20172833" type="Const" version="opset1">
<data offset="1148778" size="1024" shape="256,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="101" name="Multiply_23611611424/restored_convert/quantized20092599" type="Const" version="opset1">
<data offset="1149802" size="589824" shape="256,256,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="102" name="Multiply_23611611424/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="103" name="Multiply_2251/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="104" name="Multiply_2251" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>256</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="105" name="Constant_22561642731" type="Const" version="opset1">
<data offset="1739626" size="512" shape="1,256,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="106" name="Constant_22561641425/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="107" name="115" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>256</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="115">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="108" name="116" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="116">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="109" name="118/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</output>
</layer>
<layer id="110" name="118" type="MaxPool" version="opset8">
<data strides="2,2" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="floor" auto_pad="explicit" dilations="1,1" axis="0" index_element_type="i64"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>24</dim>
<dim>500</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="118">
<dim>1</dim>
<dim>256</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>256</dim>
<dim>12</dim>
<dim>250</dim>
<rt_info/>
</port>
</output>
</layer>
<layer id="112" name="8308342746" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="113" name="8318352683" type="Const" version="opset1">
<data offset="1740138" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="114" name="8328362845" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="115" name="8338372770" type="Const" version="opset1">
<data offset="1740138" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="116" name="5575612650" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="117" name="5585622569" type="Const" version="opset1">
<data offset="1740142" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="118" name="5595632674" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="119" name="5605642812" type="Const" version="opset1">
<data offset="1740142" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="120" name="5775812716" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="121" name="5785822602" type="Const" version="opset1">
<data offset="1740146" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="122" name="5795832755" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="123" name="5805842722" type="Const" version="opset1">
<data offset="1740146" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="124" name="Multiply_2261/fq_weights_1/scale18672665" type="Const" version="opset1">
<data offset="1740150" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="125" name="Multiply_23671691426/restored_convert/quantized18592677" type="Const" version="opset1">
<data offset="1742198" size="1179648" shape="512,256,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="126" name="Multiply_23671691426/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="127" name="Multiply_2261/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="128" name="Multiply_2261" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>256</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>256</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="129" name="Constant_22661722749" type="Const" version="opset1">
<data offset="2921846" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="130" name="Constant_22661721427/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="131" name="121" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="121">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="132" name="122" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="122">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="133" name="Multiply_2271/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="134" name="Multiply_2271/fq_weights_1/scale16572647" type="Const" version="opset1">
<data offset="2922870" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="135" name="Multiply_23731761428/restored_convert/quantized16492617" type="Const" version="opset1">
<data offset="2924918" size="2359296" shape="512,512,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="136" name="Multiply_23731761428/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="137" name="Multiply_2271/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="138" name="Multiply_2271" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="139" name="Constant_22761792767" type="Const" version="opset1">
<data offset="5284214" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="140" name="Constant_22761791429/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="141" name="124" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="124">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="142" name="125" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="125">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="143" name="Multiply_2281/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="144" name="Multiply_2281/fq_weights_1/scale18372779" type="Const" version="opset1">
<data offset="5285238" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="145" name="Multiply_23791831430/restored_convert/quantized18292800" type="Const" version="opset1">
<data offset="5287286" size="2359296" shape="512,512,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="146" name="Multiply_23791831430/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="147" name="Multiply_2281/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="148" name="Multiply_2281" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="149" name="Constant_22861862686" type="Const" version="opset1">
<data offset="7646582" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="150" name="Constant_22861861431/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="151" name="127" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="127">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="152" name="128" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="128">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="153" name="130/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</output>
</layer>
<layer id="154" name="130" type="MaxPool" version="opset8">
<data strides="2,2" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="floor" auto_pad="explicit" dilations="1,1" axis="0" index_element_type="i64"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>12</dim>
<dim>250</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="130">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
<rt_info/>
</port>
</output>
</layer>
<layer id="156" name="7907942704" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="157" name="7917952611" type="Const" version="opset1">
<data offset="7647606" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="158" name="7927962818" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="159" name="7937972635" type="Const" version="opset1">
<data offset="7647606" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="160" name="4574612743" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="161" name="4584622701" type="Const" version="opset1">
<data offset="7647610" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="162" name="4594632638" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="163" name="4604642740" type="Const" version="opset1">
<data offset="7647610" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="164" name="4374412608" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="165" name="4384422773" type="Const" version="opset1">
<data offset="7647614" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="166" name="4394432653" type="Const" version="opset1">
<data offset="0" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="167" name="4404442761" type="Const" version="opset1">
<data offset="7647614" size="4" shape="" element_type="f32"/>
<output>
<port id="0" precision="FP32"/>
</output>
</layer>
<layer id="168" name="Multiply_2291/fq_weights_1/scale17172734" type="Const" version="opset1">
<data offset="7647618" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="169" name="Multiply_23851911432/restored_convert/quantized17092842" type="Const" version="opset1">
<data offset="7649666" size="2359296" shape="512,512,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="170" name="Multiply_23851911432/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="171" name="Multiply_2291/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="172" name="Multiply_2291" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="173" name="Constant_22961942827" type="Const" version="opset1">
<data offset="10008962" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="174" name="Constant_22961941433/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="175" name="133" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="133">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="176" name="134" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="134">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="177" name="Multiply_2301/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="178" name="Multiply_2301/fq_weights_1/scale17472851" type="Const" version="opset1">
<data offset="10009986" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="179" name="Multiply_23911981434/restored_convert/quantized17392641" type="Const" version="opset1">
<data offset="10012034" size="2359296" shape="512,512,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="180" name="Multiply_23911981434/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="181" name="Multiply_2301/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="182" name="Multiply_2301" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="183" name="Constant_23062012605" type="Const" version="opset1">
<data offset="12371330" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="184" name="Constant_23062011435/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="185" name="136" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="136">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="186" name="137" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="137">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="187" name="Multiply_2311/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="188" name="Multiply_2311/fq_weights_1/scale16872614" type="Const" version="opset1">
<data offset="12372354" size="2048" shape="512,1,1,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="189" name="Multiply_23972051436/restored_convert/quantized16792815" type="Const" version="opset1">
<data offset="12374402" size="2359296" shape="512,512,3,3" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="190" name="Multiply_23972051436/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="191" name="Multiply_2311/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="192" name="Multiply_2311" type="Convolution" version="opset1">
<data auto_pad="explicit" strides="1,1" dilations="1,1" pads_begin="1,1" pads_end="1,1"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>512</dim>
<dim>512</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="193" name="Constant_23162082557" type="Const" version="opset1">
<data offset="14733698" size="1024" shape="1,512,1,1" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="194" name="Constant_23162081437/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="195" name="139" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>512</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="139">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="196" name="140" type="ReLU" version="opset1">
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="140">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="197" name="142/fq_input_0" type="FakeQuantize" version="opset1">
<data levels="256" auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
<port id="1"/>
<port id="2"/>
<port id="3"/>
<port id="4"/>
</input>
<output>
<port id="5" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</output>
</layer>
<layer id="198" name="142" type="MaxPool" version="opset8">
<data strides="2,2" kernel="2,2" pads_begin="0,0" pads_end="0,0" rounding_type="floor" auto_pad="explicit" dilations="1,1" axis="0" index_element_type="i64"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>6</dim>
<dim>125</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="142">
<dim>1</dim>
<dim>512</dim>
<dim>3</dim>
<dim>62</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>512</dim>
<dim>3</dim>
<dim>62</dim>
<rt_info/>
</port>
</output>
</layer>
<layer id="201" name="1442122821" type="Const" version="opset1">
<data offset="14734722" size="24" shape="3" element_type="i64"/>
<output>
<port id="0" precision="I64" names="144">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="202" name="145" type="Reshape" version="opset1">
<data special_zero="true"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>3</dim>
<dim>62</dim>
</port>
<port id="1">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="145">
<dim>1</dim>
<dim>512</dim>
<dim>186</dim>
</port>
</output>
</layer>
<layer id="203" name="148/fq_weights_1/scale18072563" type="Const" version="opset1">
<data offset="14734746" size="17768" shape="4442,1" element_type="f32"/>
<output>
<port id="0" precision="FP32">
<dim>4442</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="204" name="922151439/restored_convert/quantized17992782" type="Const" version="opset1">
<data offset="14752514" size="2274304" shape="4442,512" element_type="i8"/>
<output>
<port id="0" precision="I8">
<dim>4442</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="205" name="922151439/restored_convert/quantized/to_f32" type="Convert" version="opset1">
<data destination_type="f32"/>
<input>
<port id="0">
<dim>4442</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>4442</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="206" name="148/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>4442</dim>
<dim>512</dim>
</port>
<port id="1">
<dim>4442</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>4442</dim>
<dim>512</dim>
</port>
</output>
</layer>
<layer id="207" name="148" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="true"/>
<input>
<port id="0">
<dim>1</dim>
<dim>512</dim>
<dim>186</dim>
</port>
<port id="1">
<dim>4442</dim>
<dim>512</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="148">
<dim>1</dim>
<dim>186</dim>
<dim>4442</dim>
</port>
</output>
</layer>
<layer id="208" name="Constant_24772182692" type="Const" version="opset1">
<data offset="17026818" size="8884" shape="1,1,4442" element_type="f16"/>
<output>
<port id="0" precision="FP16">
<dim>1</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</output>
</layer>
<layer id="209" name="Constant_24772181440/restored_convert" type="Convert" version="opset1">
<data destination_type="f32"/>
<rt_info>
<attribute name="decompression" version="0"/>
</rt_info>
<input>
<port id="0">
<dim>1</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</output>
</layer>
<layer id="210" name="149" type="Add" version="opset1">
<data auto_broadcast="numpy"/>
<input>
<port id="0">
<dim>1</dim>
<dim>186</dim>
<dim>4442</dim>
</port>
<port id="1">
<dim>1</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="149">
<dim>1</dim>
<dim>186</dim>
<dim>4442</dim>
</port>
</output>
</layer>
<layer id="211" name="Constant_15952202824" type="Const" version="opset1">
<data offset="17035702" size="24" shape="3" element_type="i64"/>
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="212" name="output" type="Reshape" version="opset1">
<data special_zero="true"/>
<input>
<port id="0">
<dim>1</dim>
<dim>186</dim>
<dim>4442</dim>
</port>
<port id="1">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="output">
<dim>186</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</output>
</layer>
<layer id="213" name="output/sink_port_0" type="Result" version="opset1">
<input>
<port id="0">
<dim>186</dim>
<dim>1</dim>
<dim>4442</dim>
</port>
</input>
</layer>
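		<!-- Editor's note: recognition head summary, derived from the layers above.
		     After the last MaxPool the feature map is 1x512x3x62; layer 202 reshapes
		     it to 1x512x186 (3*62 = 186 time steps), layer 207 multiplies it by the
		     4442x512 classifier weights (with transpose_a and transpose_b) to give
		     1x186x4442 class scores, layer 210 adds the per-class bias, and layer 212
		     reshapes to the time-major 186x1x4442 tensor expected by a CTC decoder. -->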
</layers>
<edges>
<edge from-layer="21" from-port="0" to-layer="22" to-port="0"/>
<edge from-layer="20" from-port="0" to-layer="23" to-port="0"/>
<edge from-layer="22" from-port="1" to-layer="23" to-port="1"/>
<edge from-layer="23" from-port="2" to-layer="24" to-port="0"/>
<edge from-layer="16" from-port="0" to-layer="24" to-port="1"/>
<edge from-layer="17" from-port="0" to-layer="24" to-port="2"/>
<edge from-layer="18" from-port="0" to-layer="24" to-port="3"/>
<edge from-layer="19" from-port="0" to-layer="24" to-port="4"/>
<edge from-layer="26" from-port="0" to-layer="27" to-port="0"/>
<edge from-layer="27" from-port="1" to-layer="28" to-port="0"/>
<edge from-layer="25" from-port="0" to-layer="28" to-port="1"/>
<edge from-layer="24" from-port="5" to-layer="29" to-port="0"/>
<edge from-layer="28" from-port="2" to-layer="29" to-port="1"/>
<edge from-layer="30" from-port="0" to-layer="31" to-port="0"/>
<edge from-layer="29" from-port="2" to-layer="32" to-port="0"/>
<edge from-layer="31" from-port="1" to-layer="32" to-port="1"/>
<edge from-layer="32" from-port="2" to-layer="33" to-port="0"/>
<edge from-layer="33" from-port="1" to-layer="34" to-port="0"/>
<edge from-layer="12" from-port="0" to-layer="34" to-port="1"/>
<edge from-layer="13" from-port="0" to-layer="34" to-port="2"/>
<edge from-layer="14" from-port="0" to-layer="34" to-port="3"/>
<edge from-layer="15" from-port="0" to-layer="34" to-port="4"/>
<edge from-layer="36" from-port="0" to-layer="37" to-port="0"/>
<edge from-layer="37" from-port="1" to-layer="38" to-port="0"/>
<edge from-layer="35" from-port="0" to-layer="38" to-port="1"/>
<edge from-layer="34" from-port="5" to-layer="39" to-port="0"/>
<edge from-layer="38" from-port="2" to-layer="39" to-port="1"/>
<edge from-layer="40" from-port="0" to-layer="41" to-port="0"/>
<edge from-layer="39" from-port="2" to-layer="42" to-port="0"/>
<edge from-layer="41" from-port="1" to-layer="42" to-port="1"/>
<edge from-layer="42" from-port="2" to-layer="43" to-port="0"/>
<edge from-layer="43" from-port="1" to-layer="44" to-port="0"/>
<edge from-layer="8" from-port="0" to-layer="44" to-port="1"/>
<edge from-layer="9" from-port="0" to-layer="44" to-port="2"/>
<edge from-layer="10" from-port="0" to-layer="44" to-port="3"/>
<edge from-layer="11" from-port="0" to-layer="44" to-port="4"/>
<edge from-layer="44" from-port="5" to-layer="45" to-port="0"/>
<edge from-layer="47" from-port="0" to-layer="48" to-port="0"/>
<edge from-layer="48" from-port="1" to-layer="49" to-port="0"/>
<edge from-layer="46" from-port="0" to-layer="49" to-port="1"/>
<edge from-layer="45" from-port="1" to-layer="50" to-port="0"/>
<edge from-layer="49" from-port="2" to-layer="50" to-port="1"/>
<edge from-layer="51" from-port="0" to-layer="52" to-port="0"/>
<edge from-layer="50" from-port="2" to-layer="53" to-port="0"/>
<edge from-layer="52" from-port="1" to-layer="53" to-port="1"/>
<edge from-layer="53" from-port="2" to-layer="54" to-port="0"/>
<edge from-layer="54" from-port="1" to-layer="55" to-port="0"/>
<edge from-layer="4" from-port="0" to-layer="55" to-port="1"/>
<edge from-layer="5" from-port="0" to-layer="55" to-port="2"/>
<edge from-layer="6" from-port="0" to-layer="55" to-port="3"/>
<edge from-layer="7" from-port="0" to-layer="55" to-port="4"/>
<edge from-layer="57" from-port="0" to-layer="58" to-port="0"/>
<edge from-layer="58" from-port="1" to-layer="59" to-port="0"/>
<edge from-layer="56" from-port="0" to-layer="59" to-port="1"/>
<edge from-layer="55" from-port="5" to-layer="60" to-port="0"/>
<edge from-layer="59" from-port="2" to-layer="60" to-port="1"/>
<edge from-layer="61" from-port="0" to-layer="62" to-port="0"/>
<edge from-layer="60" from-port="2" to-layer="63" to-port="0"/>
<edge from-layer="62" from-port="1" to-layer="63" to-port="1"/>
<edge from-layer="63" from-port="2" to-layer="64" to-port="0"/>
<edge from-layer="64" from-port="1" to-layer="65" to-port="0"/>
<edge from-layer="0" from-port="0" to-layer="65" to-port="1"/>
<edge from-layer="1" from-port="0" to-layer="65" to-port="2"/>
<edge from-layer="2" from-port="0" to-layer="65" to-port="3"/>
<edge from-layer="3" from-port="0" to-layer="65" to-port="4"/>
<edge from-layer="65" from-port="5" to-layer="66" to-port="0"/>
<edge from-layer="81" from-port="0" to-layer="82" to-port="0"/>
<edge from-layer="82" from-port="1" to-layer="83" to-port="0"/>
<edge from-layer="80" from-port="0" to-layer="83" to-port="1"/>
<edge from-layer="66" from-port="1" to-layer="84" to-port="0"/>
<edge from-layer="83" from-port="2" to-layer="84" to-port="1"/>
<edge from-layer="85" from-port="0" to-layer="86" to-port="0"/>
<edge from-layer="84" from-port="2" to-layer="87" to-port="0"/>
<edge from-layer="86" from-port="1" to-layer="87" to-port="1"/>
<edge from-layer="87" from-port="2" to-layer="88" to-port="0"/>
<edge from-layer="88" from-port="1" to-layer="89" to-port="0"/>
<edge from-layer="76" from-port="0" to-layer="89" to-port="1"/>
<edge from-layer="77" from-port="0" to-layer="89" to-port="2"/>
<edge from-layer="78" from-port="0" to-layer="89" to-port="3"/>
<edge from-layer="79" from-port="0" to-layer="89" to-port="4"/>
<edge from-layer="91" from-port="0" to-layer="92" to-port="0"/>
<edge from-layer="92" from-port="1" to-layer="93" to-port="0"/>
<edge from-layer="90" from-port="0" to-layer="93" to-port="1"/>
<edge from-layer="89" from-port="5" to-layer="94" to-port="0"/>
<edge from-layer="93" from-port="2" to-layer="94" to-port="1"/>
<edge from-layer="95" from-port="0" to-layer="96" to-port="0"/>
<edge from-layer="94" from-port="2" to-layer="97" to-port="0"/>
<edge from-layer="96" from-port="1" to-layer="97" to-port="1"/>
<edge from-layer="97" from-port="2" to-layer="98" to-port="0"/>
<edge from-layer="98" from-port="1" to-layer="99" to-port="0"/>
<edge from-layer="72" from-port="0" to-layer="99" to-port="1"/>
<edge from-layer="73" from-port="0" to-layer="99" to-port="2"/>
<edge from-layer="74" from-port="0" to-layer="99" to-port="3"/>
<edge from-layer="75" from-port="0" to-layer="99" to-port="4"/>
<edge from-layer="101" from-port="0" to-layer="102" to-port="0"/>
<edge from-layer="102" from-port="1" to-layer="103" to-port="0"/>
<edge from-layer="100" from-port="0" to-layer="103" to-port="1"/>
<edge from-layer="99" from-port="5" to-layer="104" to-port="0"/>
<edge from-layer="103" from-port="2" to-layer="104" to-port="1"/>
<edge from-layer="105" from-port="0" to-layer="106" to-port="0"/>
<edge from-layer="104" from-port="2" to-layer="107" to-port="0"/>
<edge from-layer="106" from-port="1" to-layer="107" to-port="1"/>
<edge from-layer="107" from-port="2" to-layer="108" to-port="0"/>
<edge from-layer="108" from-port="1" to-layer="109" to-port="0"/>
<edge from-layer="68" from-port="0" to-layer="109" to-port="1"/>
<edge from-layer="69" from-port="0" to-layer="109" to-port="2"/>
<edge from-layer="70" from-port="0" to-layer="109" to-port="3"/>
<edge from-layer="71" from-port="0" to-layer="109" to-port="4"/>
<edge from-layer="109" from-port="5" to-layer="110" to-port="0"/>
<edge from-layer="125" from-port="0" to-layer="126" to-port="0"/>
<edge from-layer="126" from-port="1" to-layer="127" to-port="0"/>
<edge from-layer="124" from-port="0" to-layer="127" to-port="1"/>
<edge from-layer="110" from-port="1" to-layer="128" to-port="0"/>
<edge from-layer="127" from-port="2" to-layer="128" to-port="1"/>
<edge from-layer="129" from-port="0" to-layer="130" to-port="0"/>
<edge from-layer="128" from-port="2" to-layer="131" to-port="0"/>
<edge from-layer="130" from-port="1" to-layer="131" to-port="1"/>
<edge from-layer="131" from-port="2" to-layer="132" to-port="0"/>
<edge from-layer="132" from-port="1" to-layer="133" to-port="0"/>
<edge from-layer="120" from-port="0" to-layer="133" to-port="1"/>
<edge from-layer="121" from-port="0" to-layer="133" to-port="2"/>
<edge from-layer="122" from-port="0" to-layer="133" to-port="3"/>
<edge from-layer="123" from-port="0" to-layer="133" to-port="4"/>
<edge from-layer="135" from-port="0" to-layer="136" to-port="0"/>
<edge from-layer="136" from-port="1" to-layer="137" to-port="0"/>
<edge from-layer="134" from-port="0" to-layer="137" to-port="1"/>
<edge from-layer="133" from-port="5" to-layer="138" to-port="0"/>
<edge from-layer="137" from-port="2" to-layer="138" to-port="1"/>
<edge from-layer="139" from-port="0" to-layer="140" to-port="0"/>
<edge from-layer="138" from-port="2" to-layer="141" to-port="0"/>
<edge from-layer="140" from-port="1" to-layer="141" to-port="1"/>
<edge from-layer="141" from-port="2" to-layer="142" to-port="0"/>
<edge from-layer="142" from-port="1" to-layer="143" to-port="0"/>
<edge from-layer="116" from-port="0" to-layer="143" to-port="1"/>
<edge from-layer="117" from-port="0" to-layer="143" to-port="2"/>
<edge from-layer="118" from-port="0" to-layer="143" to-port="3"/>
<edge from-layer="119" from-port="0" to-layer="143" to-port="4"/>
<edge from-layer="145" from-port="0" to-layer="146" to-port="0"/>
<edge from-layer="146" from-port="1" to-layer="147" to-port="0"/>
<edge from-layer="144" from-port="0" to-layer="147" to-port="1"/>
<edge from-layer="143" from-port="5" to-layer="148" to-port="0"/>
<edge from-layer="147" from-port="2" to-layer="148" to-port="1"/>
<edge from-layer="149" from-port="0" to-layer="150" to-port="0"/>
<edge from-layer="148" from-port="2" to-layer="151" to-port="0"/>
<edge from-layer="150" from-port="1" to-layer="151" to-port="1"/>
<edge from-layer="151" from-port="2" to-layer="152" to-port="0"/>
<edge from-layer="152" from-port="1" to-layer="153" to-port="0"/>
<edge from-layer="112" from-port="0" to-layer="153" to-port="1"/>
<edge from-layer="113" from-port="0" to-layer="153" to-port="2"/>
<edge from-layer="114" from-port="0" to-layer="153" to-port="3"/>
<edge from-layer="115" from-port="0" to-layer="153" to-port="4"/>
<edge from-layer="153" from-port="5" to-layer="154" to-port="0"/>
<edge from-layer="169" from-port="0" to-layer="170" to-port="0"/>
<edge from-layer="170" from-port="1" to-layer="171" to-port="0"/>
<edge from-layer="168" from-port="0" to-layer="171" to-port="1"/>
<edge from-layer="154" from-port="1" to-layer="172" to-port="0"/>
<edge from-layer="171" from-port="2" to-layer="172" to-port="1"/>
<edge from-layer="173" from-port="0" to-layer="174" to-port="0"/>
<edge from-layer="172" from-port="2" to-layer="175" to-port="0"/>
<edge from-layer="174" from-port="1" to-layer="175" to-port="1"/>
<edge from-layer="175" from-port="2" to-layer="176" to-port="0"/>
<edge from-layer="176" from-port="1" to-layer="177" to-port="0"/>
<edge from-layer="164" from-port="0" to-layer="177" to-port="1"/>
<edge from-layer="165" from-port="0" to-layer="177" to-port="2"/>
<edge from-layer="166" from-port="0" to-layer="177" to-port="3"/>
<edge from-layer="167" from-port="0" to-layer="177" to-port="4"/>
<edge from-layer="179" from-port="0" to-layer="180" to-port="0"/>
<edge from-layer="180" from-port="1" to-layer="181" to-port="0"/>
<edge from-layer="178" from-port="0" to-layer="181" to-port="1"/>
<edge from-layer="177" from-port="5" to-layer="182" to-port="0"/>
<edge from-layer="181" from-port="2" to-layer="182" to-port="1"/>
<edge from-layer="183" from-port="0" to-layer="184" to-port="0"/>
<edge from-layer="182" from-port="2" to-layer="185" to-port="0"/>
<edge from-layer="184" from-port="1" to-layer="185" to-port="1"/>
<edge from-layer="185" from-port="2" to-layer="186" to-port="0"/>
<edge from-layer="186" from-port="1" to-layer="187" to-port="0"/>
<edge from-layer="160" from-port="0" to-layer="187" to-port="1"/>
<edge from-layer="161" from-port="0" to-layer="187" to-port="2"/>
<edge from-layer="162" from-port="0" to-layer="187" to-port="3"/>
<edge from-layer="163" from-port="0" to-layer="187" to-port="4"/>
<edge from-layer="189" from-port="0" to-layer="190" to-port="0"/>
<edge from-layer="190" from-port="1" to-layer="191" to-port="0"/>
<edge from-layer="188" from-port="0" to-layer="191" to-port="1"/>
<edge from-layer="187" from-port="5" to-layer="192" to-port="0"/>
<edge from-layer="191" from-port="2" to-layer="192" to-port="1"/>
<edge from-layer="193" from-port="0" to-layer="194" to-port="0"/>
<edge from-layer="192" from-port="2" to-layer="195" to-port="0"/>
<edge from-layer="194" from-port="1" to-layer="195" to-port="1"/>
<edge from-layer="195" from-port="2" to-layer="196" to-port="0"/>
<edge from-layer="196" from-port="1" to-layer="197" to-port="0"/>
<edge from-layer="156" from-port="0" to-layer="197" to-port="1"/>
<edge from-layer="157" from-port="0" to-layer="197" to-port="2"/>
<edge from-layer="158" from-port="0" to-layer="197" to-port="3"/>
<edge from-layer="159" from-port="0" to-layer="197" to-port="4"/>
<edge from-layer="197" from-port="5" to-layer="198" to-port="0"/>
<edge from-layer="198" from-port="1" to-layer="202" to-port="0"/>
<edge from-layer="201" from-port="0" to-layer="202" to-port="1"/>
<edge from-layer="204" from-port="0" to-layer="205" to-port="0"/>
<edge from-layer="205" from-port="1" to-layer="206" to-port="0"/>
<edge from-layer="203" from-port="0" to-layer="206" to-port="1"/>
<edge from-layer="202" from-port="2" to-layer="207" to-port="0"/>
<edge from-layer="206" from-port="2" to-layer="207" to-port="1"/>
<edge from-layer="208" from-port="0" to-layer="209" to-port="0"/>
<edge from-layer="207" from-port="2" to-layer="210" to-port="0"/>
<edge from-layer="209" from-port="1" to-layer="210" to-port="1"/>
<edge from-layer="210" from-port="2" to-layer="212" to-port="0"/>
<edge from-layer="211" from-port="0" to-layer="212" to-port="1"/>
<edge from-layer="212" from-port="2" to-layer="213" to-port="0"/>
</edges>
<rt_info>
<MO_version value="custom_HEAD_0250f62d11102ae97a35803756f079090876ddc1"/>
<Runtime_version value="2023.0.0-10367-0250f62d111-HEAD"/>
<conversion_parameters>
<framework value="onnx"/>
<input value="actual_input"/>
<input_model value="DIR/handwritten-japanese-recognition-0001.onnx"/>
<input_shape value="[1,1,96,2000]"/>
<layout value="actual_input(nchw)"/>
<mean_values value="actual_input[127.5]"/>
<model_name value="handwritten-japanese-recognition-0001"/>
<output value="output"/>
<output_dir value="DIR"/>
<scale_values value="actual_input[127.5]"/>
</conversion_parameters>
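		<!-- Editor's note: the conversion_parameters above record how this IR was
		     produced from the ONNX model: a 1x1x96x2000 NCHW grayscale input, with
		     mean and scale values of 127.5 folded into the graph, so host code feeds
		     raw 0..255 pixels. The sketch below is a minimal, unofficial example of
		     preparing one text line image and running the compiled model with the
		     OpenVINO Python API; the file names model.xml and sample.png, the CPU
		     device choice, and the use of OpenCV and NumPy are assumptions, not part
		     of this file.

		     import cv2
		     import numpy as np
		     from openvino.runtime import Core

		     core = Core()
		     compiled = core.compile_model(core.read_model("model.xml"), "CPU")

		     # Match the accuracy-checker preprocessing recorded further below:
		     # grayscale, resize to height 96 keeping aspect ratio, then edge-pad
		     # on the right to a 96x2000 canvas.
		     img = cv2.imread("sample.png", cv2.IMREAD_GRAYSCALE)
		     h, w = img.shape
		     new_w = min(2000, int(w * 96 / h))
		     img = cv2.resize(img, (new_w, 96), interpolation=cv2.INTER_AREA)
		     img = np.pad(img, ((0, 0), (0, 2000 - new_w)), mode="edge")

		     # 1x1x96x2000 blob; normalisation is already inside the graph.
		     blob = img[np.newaxis, np.newaxis, :, :].astype(np.float32)
		     logits = compiled([blob])[compiled.output(0)]   # shape (186, 1, 4442)
		-->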
<legacy_frontend value="False"/>
<quantization_parameters>
<cli_params>
<ac_config value="None"/>
<data_source value="None"/>
<direct_dump value="True"/>
<engine value="None"/>
<evaluate value="False"/>
<keep_uncompressed_weights value="False"/>
<log_level value="INFO"/>
<max_drop value="None"/>
<model value="None"/>
<name value="None"/>
<output_dir value="PATH"/>
<pbar value="False"/>
<preset value="None"/>
<quantize value="None"/>
<stream_output value="False"/>
<weights value="None"/>
</cli_params>
<config value="{
'compression': {
'algorithms': [
{
'name': 'DefaultQuantization',
'params': {
'num_samples_for_tuning': 2000,
'preset': 'performance',
'stat_subset_size': 300,
'use_layerwise_tuning': false
}
}
],
'dump_intermediate_model': true,
'target_device': 'ANY'
},
'engine': {
'models': [
{
'name': 'handwritten-japanese-recognition-0001',
'launchers': [
{
'framework': 'openvino',
'adapter': 'ctc_greedy_search_decoder',
'device': 'cpu'
}
],
'datasets': [
{
'name': 'Kondate_nakayosi',
'data_source': 'PATH',
'annotation_conversion': {
'converter': 'kondate_nakayosi_recognition',
'decoding_char_file': 'PATH',
'annotation_file': 'PATH'
},
'annotation': 'PATH',
'dataset_meta': 'PATH',
'preprocessing': [
{
'type': 'bgr_to_gray'
},
{
'type': 'resize',
'interpolation': 'AREA',
'aspect_ratio_scale': 'width',
'size': 96
},
{
'type': 'padding',
'use_numpy': true,
'numpy_pad_mode': 'edge',
'dst_height': 96,
'dst_width': 2000,
'pad_type': 'right_bottom'
}
],
'metrics': [
{
'type': 'label_level_recognition_accuracy',
'reference': 0.9816
}
],
'_command_line_mapping': {
'decoding_char_file': 'PATH',
'annotation_file': 'PATH'
}
}
]
}
],
'stat_requests_number': null,
'eval_requests_number': null,
'type': 'accuracy_checker'
}
}"/>
<version value="invalid version"/>
</quantization_parameters>
</rt_info>
<quantization_parameters>
<config>{
'compression': {
'algorithms': [
{
'name': 'DefaultQuantization',
'params': {
'num_samples_for_tuning': 2000,
'preset': 'performance',
'stat_subset_size': 300,
'use_layerwise_tuning': false
}
}
],
'dump_intermediate_model': true,
'target_device': 'ANY'
},
'engine': {
'models': [
{
'name': 'handwritten-japanese-recognition-0001',
'launchers': [
{
'framework': 'openvino',
'adapter': 'ctc_greedy_search_decoder',
'device': 'cpu'
}
],
'datasets': [
{
'name': 'Kondate_nakayosi',
'data_source': 'PATH',
'annotation_conversion': {
'converter': 'kondate_nakayosi_recognition',
'decoding_char_file': 'PATH',
'annotation_file': 'PATH'
},
'annotation': 'PATH',
'dataset_meta': 'PATH',
'preprocessing': [
{
'type': 'bgr_to_gray'
},
{
'type': 'resize',
'interpolation': 'AREA',
'aspect_ratio_scale': 'width',
'size': 96
},
{
'type': 'padding',
'use_numpy': true,
'numpy_pad_mode': 'edge',
'dst_height': 96,
'dst_width': 2000,
'pad_type': 'right_bottom'
}
],
'metrics': [
{
'type': 'label_level_recognition_accuracy',
'reference': 0.9816
}
],
'_command_line_mapping': {
'decoding_char_file': 'PATH',
'annotation_file': 'PATH'
}
}
]
}
],
'stat_requests_number': null,
'eval_requests_number': null,
'type': 'accuracy_checker'
}
}</config>
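		<!-- Editor's note: the launcher above evaluates the model with the
		     ctc_greedy_search_decoder adapter. A minimal NumPy equivalent is sketched
		     below for reference; it assumes the blank label sits at index 0 and that
		     charlist holds one decoded symbol per output class (loaded from the
		     decoding_char_file, shown here only as PATH), so check those details
		     against the files shipped with the model before relying on it.

		     import numpy as np

		     def ctc_greedy_decode(logits, charlist, blank=0):
		         # logits has shape (time, batch=1, classes); take the best class per
		         # time step, collapse consecutive repeats, and drop the blank label.
		         best = logits[:, 0, :].argmax(axis=1)
		         prev = blank
		         chars = []
		         for idx in best:
		             if idx != prev and idx != blank:
		                 chars.append(charlist[idx])
		             prev = idx
		         return "".join(chars)

		     # text = ctc_greedy_decode(logits, charlist)
		-->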
<version value="invalid version"/>
<cli_params value="{'quantize': None, 'preset': None, 'model': None, 'weights': None, 'name': None, 'engine': None, 'ac_config': None, 'max_drop': None, 'evaluate': False, 'output_dir': 'PATH', 'direct_dump': True, 'log_level': 'INFO', 'pbar': False, 'stream_output': False, 'keep_uncompressed_weights': False, 'data_source': None}"/>
</quantization_parameters>
</net>