serverless-workflow-examples/serverless-workflow-openvino-quarkus/model/v3-small_224_1.0_float.xml

<?xml version="1.0" ?> <net name="v3-small_224_1.0_float" version="11"> <layers> <layer id="0" name="input" type="Parameter" version="opset1"> <data shape="1,224,224,3" element_type="f32"/> <rt_info> <attribute name="fused_names" version="0" value="input"/> <attribute name="old_api_map_element_type" version="0" value="f16"/> <attribute name="old_api_map_order" version="0" value="0, 2, 3, 1"/> </rt_info> <output> <port id="0" precision="FP32" names="input:0"> <dim>1</dim> <dim>224</dim> <dim>224</dim> <dim>3</dim> </port> </output> </layer> <layer id="1" name="input/Transpose([0 3 1 2])/value1210615860" type="Const" version="opset1"> <data element_type="i64" shape="4" offset="0" size="32"/> <rt_info> <attribute name="fused_names" version="0" value="input/Transpose([0 3 1 2])/value1210615860"/> </rt_info> <output> <port id="0" precision="I64"> <dim>4</dim> </port> </output> </layer> <layer id="2" name="input/Transpose([0 3 1 2])" type="Transpose" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="input/Transpose([0 3 1 2])"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>224</dim> <dim>224</dim> <dim>3</dim> </port> <port id="1" precision="I64"> <dim>4</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> </output> </layer> <layer id="3" name="data_mul_921415998_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="32" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="4" name="data_mul_921415998" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_mul_921415998"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="5" name="input/scale/Fused_Mul_" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="input/scale/Fused_Mul_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> </output> </layer> <layer id="6" name="data_add_921616001_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 3, 1, 1" offset="34" size="6"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>3</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="7" name="data_add_921616001" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_921616001"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>3</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="8" name="input/mean/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="input/mean/Fused_Add_"/> 
</rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/input:0,batch_processing/Reshape:0"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> </output> </layer> <layer id="9" name="MobilenetV3/Conv/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="16, 3, 3, 3" offset="40" size="864"/> <output> <port id="0" precision="FP16"> <dim>16</dim> <dim>3</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="10" name="MobilenetV3/Conv/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Conv/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>16</dim> <dim>3</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv/weights/read:0"> <dim>16</dim> <dim>3</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="11" name="MobilenetV3/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>3</dim> <dim>224</dim> <dim>224</dim> </port> <port id="1" precision="FP32"> <dim>16</dim> <dim>3</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> </output> </layer> <layer id="12" name="data_add_92199224_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 16, 1, 1" offset="904" size="32"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="13" name="data_add_92199224" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92199224"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="14" name="MobilenetV3/Conv/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Conv/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> </output> </layer> <layer id="15" name="MobilenetV3/Conv/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> 
<dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv/hard_swish/mul_1:0,MobilenetV3/expanded_conv/input:0"> <dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> </output> </layer> <layer id="16" name="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="16, 1, 1, 3, 3" offset="936" size="288"/> <output> <port id="0" precision="FP16"> <dim>16</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="17" name="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>16</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/depthwise/depthwise_weights/read:0"> <dim>16</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="18" name="MobilenetV3/expanded_conv/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="2, 2" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>112</dim> <dim>112</dim> </port> <port id="1" precision="FP32"> <dim>16</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="19" name="data_add_92279232_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 16, 1, 1" offset="1224" size="32"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="20" name="data_add_92279232" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92279232"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="21" name="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="22" name="MobilenetV3/expanded_conv/depthwise/Relu" type="ReLU" version="opset1"> <rt_info> 
<attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/depthwise/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/depthwise/Relu:0,MobilenetV3/expanded_conv/depthwise_output:0"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="23" name="MobilenetV3/expanded_conv/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="56, 56" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="24" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="8, 16, 1, 1" offset="1256" size="256"/> <output> <port id="0" precision="FP16"> <dim>8</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="25" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>8</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv/weights/read:0"> <dim>8</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="26" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>8</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="27" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 8, 1, 1" offset="1512" size="16"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="28" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" 
names="MobilenetV3/expanded_conv/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="29" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="30" name="MobilenetV3/expanded_conv/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="31" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="16, 8, 1, 1" offset="1528" size="256"/> <output> <port id="0" precision="FP16"> <dim>16</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="32" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>16</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/weights/read:0"> <dim>16</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="33" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>16</dim> <dim>8</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="34" name="data_add_92349236_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 16, 1, 1" offset="1784" size="32"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="35" name="data_add_92349236" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92349236"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> 
<dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="36" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="37" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="38" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="39" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="40" name="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv/squeeze_excite/excite:0"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="41" name="MobilenetV3/expanded_conv/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" 
names="MobilenetV3/expanded_conv/squeeze_excite/mul:0"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="42" name="MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="16, 16, 1, 1" offset="1818" size="512"/> <output> <port id="0" precision="FP16"> <dim>16</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="43" name="MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>16</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv/project/weights/read:0"> <dim>16</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="44" name="MobilenetV3/expanded_conv/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>16</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="45" name="data_add_92399244_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 16, 1, 1" offset="2330" size="32"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="46" name="data_add_92399244" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92399244"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="47" name="MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv/output:0,MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv/project/Identity:0,MobilenetV3/expanded_conv_1/input:0"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="48" name="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="72, 16, 1, 1" offset="2362" size="2304"/> 
<output> <port id="0" precision="FP16"> <dim>72</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="49" name="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>72</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_1/expand/weights/read:0"> <dim>72</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="50" name="MobilenetV3/expanded_conv_1/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>16</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>72</dim> <dim>16</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="51" name="data_add_92479252_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 72, 1, 1" offset="4666" size="144"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="52" name="data_add_92479252" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92479252"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="53" name="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_1/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="54" name="MobilenetV3/expanded_conv_1/expand/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/expand/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_1/expand/Relu:0,MobilenetV3/expanded_conv_1/expansion_output:0"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> </output> </layer> <layer id="55" name="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> 
<data element_type="f16" shape="72, 1, 1, 3, 3" offset="4810" size="1296"/> <output> <port id="0" precision="FP16"> <dim>72</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="56" name="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>72</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_1/depthwise/depthwise_weights/read:0"> <dim>72</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="57" name="MobilenetV3/expanded_conv_1/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="2, 2" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>56</dim> <dim>56</dim> </port> <port id="1" precision="FP32"> <dim>72</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="58" name="data_add_92559260_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 72, 1, 1" offset="6106" size="144"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="59" name="data_add_92559260" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92559260"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="60" name="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_1/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="61" name="MobilenetV3/expanded_conv_1/depthwise/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/depthwise/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_1/depthwise/Relu:0,MobilenetV3/expanded_conv_1/depthwise_output:0"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> 
</port> </output> </layer> <layer id="62" name="MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="24, 72, 1, 1" offset="6250" size="3456"/> <output> <port id="0" precision="FP16"> <dim>24</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="63" name="MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>24</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_1/project/weights/read:0"> <dim>24</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="64" name="MobilenetV3/expanded_conv_1/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>24</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="65" name="data_add_92639268_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 24, 1, 1" offset="9706" size="48"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="66" name="data_add_92639268" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92639268"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="67" name="MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_1/output:0,MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_1/project/Identity:0,MobilenetV3/expanded_conv_2/input:0"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="68" name="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="88, 24, 1, 1" offset="9754" size="4224"/> <output> <port id="0" precision="FP16"> <dim>88</dim> <dim>24</dim> <dim>1</dim> 
<dim>1</dim> </port> </output> </layer> <layer id="69" name="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>88</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_2/expand/weights/read:0"> <dim>88</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="70" name="MobilenetV3/expanded_conv_2/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>88</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="71" name="data_add_92719276_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 88, 1, 1" offset="13978" size="176"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="72" name="data_add_92719276" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92719276"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="73" name="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_2/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="74" name="MobilenetV3/expanded_conv_2/expand/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/expand/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_2/expand/Relu:0,MobilenetV3/expanded_conv_2/expansion_output:0"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="75" name="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="88, 1, 1, 3, 3" offset="14154" size="1584"/> 
<output> <port id="0" precision="FP16"> <dim>88</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="76" name="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>88</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_2/depthwise/depthwise_weights/read:0"> <dim>88</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </output> </layer> <layer id="77" name="MobilenetV3/expanded_conv_2/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>88</dim> <dim>1</dim> <dim>1</dim> <dim>3</dim> <dim>3</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="78" name="data_add_92799284_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 88, 1, 1" offset="15738" size="176"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="79" name="data_add_92799284" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92799284"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="80" name="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_2/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="81" name="MobilenetV3/expanded_conv_2/depthwise/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/depthwise/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_2/depthwise/Relu:0,MobilenetV3/expanded_conv_2/depthwise_output:0"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="82" 
name="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="24, 88, 1, 1" offset="15914" size="4224"/> <output> <port id="0" precision="FP16"> <dim>24</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="83" name="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>24</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_2/project/weights/read:0"> <dim>24</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="84" name="MobilenetV3/expanded_conv_2/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>88</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>24</dim> <dim>88</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="85" name="data_add_92879292_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 24, 1, 1" offset="20138" size="48"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="86" name="data_add_92879292" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92879292"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="87" name="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_2/project/Identity:0"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="88" name="MobilenetV3/expanded_conv_2/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_2/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </input> 
<output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_2/add:0,MobilenetV3/expanded_conv_2/output:0,MobilenetV3/expanded_conv_3/input:0"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="89" name="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 24, 1, 1" offset="20186" size="4608"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="90" name="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/expand/weights/read:0"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="91" name="MobilenetV3/expanded_conv_3/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="92" name="data_add_92959300_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="24794" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="93" name="data_add_92959300" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_92959300"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="94" name="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="95" name="MobilenetV3/expanded_conv_3/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/expand/hard_swish/mul_1"/> 
</rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_3/expansion_output:0"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> </output> </layer> <layer id="96" name="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 1, 1, 5, 5" offset="24986" size="4800"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="97" name="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/depthwise/depthwise_weights/read:0"> <dim>96</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="98" name="MobilenetV3/expanded_conv_3/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="2, 2" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>28</dim> <dim>28</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="99" name="data_add_93039308_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="29786" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="100" name="data_add_93039308" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93039308"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="101" name="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer 
id="102" name="MobilenetV3/expanded_conv_3/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_3/depthwise_output:0"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="103" name="MobilenetV3/expanded_conv_3/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="14, 14" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="104" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="24, 96, 1, 1" offset="29978" size="4608"/> <output> <port id="0" precision="FP16"> <dim>24</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="105" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>24</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights/read:0"> <dim>24</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="106" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>24</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="107" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 24, 1, 1" offset="34586" size="48"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="108" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases/read"/> </rt_info> 
<input> <port id="0" precision="FP16"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="109" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="110" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="111" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 24, 1, 1" offset="34634" size="4608"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="112" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights/read:0"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="113" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>24</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="114" name="data_add_93109312_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="39242" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="115" name="data_add_93109312" type="Convert" version="opset1"> <data 
destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93109312"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="116" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="117" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="118" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="119" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="120" name="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_3/squeeze_excite/excite:0"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="121" name="MobilenetV3/expanded_conv_3/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> 
<dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/squeeze_excite/mul:0"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="122" name="MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="40, 96, 1, 1" offset="39434" size="7680"/> <output> <port id="0" precision="FP16"> <dim>40</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="123" name="MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>40</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_3/project/weights/read:0"> <dim>40</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="124" name="MobilenetV3/expanded_conv_3/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>40</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="125" name="data_add_93159320_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 40, 1, 1" offset="47114" size="80"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="126" name="data_add_93159320" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93159320"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="127" name="MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_3/output:0,MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_3/project/Identity:0,MobilenetV3/expanded_conv_4/input:0"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> 
</output> </layer> <layer id="128" name="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 40, 1, 1" offset="47194" size="19200"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="129" name="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/expand/weights/read:0"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="130" name="MobilenetV3/expanded_conv_4/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="131" name="data_add_93239328_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="66394" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="132" name="data_add_93239328" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93239328"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="133" name="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="134" name="MobilenetV3/expanded_conv_4/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" 
names="MobilenetV3/expanded_conv_4/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_4/expansion_output:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="135" name="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 1, 1, 5, 5" offset="66874" size="12000"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="136" name="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/depthwise/depthwise_weights/read:0"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="137" name="MobilenetV3/expanded_conv_4/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="138" name="data_add_93319336_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="78874" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="139" name="data_add_93319336" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93319336"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="140" name="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="141" name="MobilenetV3/expanded_conv_4/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" 
version="0" value="MobilenetV3/expanded_conv_4/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_4/depthwise_output:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="142" name="MobilenetV3/expanded_conv_4/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="14, 14" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="143" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="64, 240, 1, 1" offset="79354" size="30720"/> <output> <port id="0" precision="FP16"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="144" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights/read:0"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="145" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="146" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 64, 1, 1" offset="110074" size="128"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="147" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" 
precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="148" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="149" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="150" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 64, 1, 1" offset="110202" size="30720"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="151" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights/read:0"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="152" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="153" name="data_add_93389340_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="140922" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="154" name="data_add_93389340" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" 
value="data_add_93389340"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="155" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="156" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="157" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="158" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="159" name="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_4/squeeze_excite/excite:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="160" name="MobilenetV3/expanded_conv_4/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> 
<dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/squeeze_excite/mul:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="161" name="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="40, 240, 1, 1" offset="141402" size="19200"/> <output> <port id="0" precision="FP16"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="162" name="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_4/project/weights/read:0"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="163" name="MobilenetV3/expanded_conv_4/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="164" name="data_add_93439348_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 40, 1, 1" offset="160602" size="80"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="165" name="data_add_93439348" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93439348"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="166" name="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_4/project/Identity:0"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="167" name="MobilenetV3/expanded_conv_4/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" 
version="0" value="MobilenetV3/expanded_conv_4/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_4/add:0,MobilenetV3/expanded_conv_4/output:0,MobilenetV3/expanded_conv_5/input:0"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="168" name="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 40, 1, 1" offset="160682" size="19200"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="169" name="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/expand/weights/read:0"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="170" name="MobilenetV3/expanded_conv_5/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="171" name="data_add_93519356_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="179882" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="172" name="data_add_93519356" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93519356"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="173" name="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> 
<dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="174" name="MobilenetV3/expanded_conv_5/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_5/expansion_output:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="175" name="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 1, 1, 5, 5" offset="180362" size="12000"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="176" name="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/depthwise/depthwise_weights/read:0"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="177" name="MobilenetV3/expanded_conv_5/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="178" name="data_add_93599364_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="192362" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="179" name="data_add_93599364" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93599364"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="180" name="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> 
</port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="181" name="MobilenetV3/expanded_conv_5/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_5/depthwise_output:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="182" name="MobilenetV3/expanded_conv_5/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="14, 14" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="183" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="64, 240, 1, 1" offset="192842" size="30720"/> <output> <port id="0" precision="FP16"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="184" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights/read:0"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="185" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>64</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="186" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 64, 1, 1" offset="223562" size="128"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> 
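<!-- Annotation (descriptive only, inferred from the layer names and shapes in this file; not produced by the model converter): the surrounding layers 182-200 form the squeeze-excite block of MobilenetV3/expanded_conv_5. A global 14x14 AvgPool reduces the 240-channel feature map to 1x1; a 1x1 Convolution squeezes 240 -> 64 channels and is followed by a bias Add and ReLU; a second 1x1 Convolution expands 64 -> 240; the result passes through a fused bias Add, a Relu6 Clamp, and a scalar Multiply (a hard-sigmoid-style gate, named ".../squeeze_excite/excite") before being multiplied channel-wise back onto the 1x240x14x14 depthwise output. The FP16 "*_compressed" Const layers hold the stored weights; the adjacent Convert layers (marked "decompression" in rt_info) restore them to FP32. -->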
<layer id="187" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="188" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="189" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="190" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="240, 64, 1, 1" offset="223690" size="30720"/> <output> <port id="0" precision="FP16"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="191" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights/read:0"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="192" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>240</dim> <dim>64</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="193" 
name="data_add_93669368_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 240, 1, 1" offset="254410" size="480"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="194" name="data_add_93669368" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93669368"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="195" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="196" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="197" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="198" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="199" name="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_5/squeeze_excite/excite:0"> <dim>1</dim> 
<dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="200" name="MobilenetV3/expanded_conv_5/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/squeeze_excite/mul:0"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="201" name="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="40, 240, 1, 1" offset="254890" size="19200"/> <output> <port id="0" precision="FP16"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="202" name="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_5/project/weights/read:0"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="203" name="MobilenetV3/expanded_conv_5/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>240</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>40</dim> <dim>240</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="204" name="data_add_93719376_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 40, 1, 1" offset="274090" size="80"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="205" name="data_add_93719376" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93719376"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="206" name="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" 
precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_5/project/Identity:0"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="207" name="MobilenetV3/expanded_conv_5/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_5/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_5/add:0,MobilenetV3/expanded_conv_5/output:0,MobilenetV3/expanded_conv_6/input:0"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="208" name="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="120, 40, 1, 1" offset="274170" size="9600"/> <output> <port id="0" precision="FP16"> <dim>120</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="209" name="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>120</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/expand/weights/read:0"> <dim>120</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="210" name="MobilenetV3/expanded_conv_6/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>120</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="211" name="data_add_93799384_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 120, 1, 1" offset="283770" size="240"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="212" name="data_add_93799384" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93799384"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="213" name="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> 
<rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="214" name="MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_6/expansion_output:0"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="215" name="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="120, 1, 1, 5, 5" offset="284010" size="6000"/> <output> <port id="0" precision="FP16"> <dim>120</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="216" name="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>120</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/depthwise/depthwise_weights/read:0"> <dim>120</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="217" name="MobilenetV3/expanded_conv_6/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>120</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="218" name="data_add_93879392_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 120, 1, 1" offset="290010" size="240"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="219" name="data_add_93879392" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93879392"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> 
<dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="220" name="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="221" name="MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_6/depthwise_output:0"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="222" name="MobilenetV3/expanded_conv_6/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="14, 14" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="223" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="32, 120, 1, 1" offset="290250" size="7680"/> <output> <port id="0" precision="FP16"> <dim>32</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="224" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>32</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/weights/read:0"> <dim>32</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="225" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>32</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" 
precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="226" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 32, 1, 1" offset="297930" size="64"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="227" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="228" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="229" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="230" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="120, 32, 1, 1" offset="297994" size="7680"/> <output> <port id="0" precision="FP16"> <dim>120</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="231" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>120</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/weights/read:0"> <dim>120</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="232" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" 
value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>120</dim> <dim>32</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="233" name="data_add_93949396_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 120, 1, 1" offset="305674" size="240"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="234" name="data_add_93949396" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93949396"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="235" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="236" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="237" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="238" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="239" name="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute 
name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_6/squeeze_excite/excite:0"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="240" name="MobilenetV3/expanded_conv_6/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/squeeze_excite/mul:0"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="241" name="MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="48, 120, 1, 1" offset="305914" size="11520"/> <output> <port id="0" precision="FP16"> <dim>48</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="242" name="MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>48</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_6/project/weights/read:0"> <dim>48</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="243" name="MobilenetV3/expanded_conv_6/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>120</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>48</dim> <dim>120</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="244" name="data_add_93999404_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 48, 1, 1" offset="317434" size="96"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="245" name="data_add_93999404" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_93999404"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>1</dim> 
<dim>1</dim> </port> </output> </layer> <layer id="246" name="MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_6/output:0,MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_6/project/Identity:0,MobilenetV3/expanded_conv_7/input:0"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="247" name="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="144, 48, 1, 1" offset="317530" size="13824"/> <output> <port id="0" precision="FP16"> <dim>144</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="248" name="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>144</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/expand/weights/read:0"> <dim>144</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="249" name="MobilenetV3/expanded_conv_7/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>144</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="250" name="data_add_94079412_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 144, 1, 1" offset="331354" size="288"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="251" name="data_add_94079412" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94079412"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="252" name="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" 
value="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="253" name="MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_7/expansion_output:0"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="254" name="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="144, 1, 1, 5, 5" offset="331642" size="7200"/> <output> <port id="0" precision="FP16"> <dim>144</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="255" name="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>144</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/depthwise/depthwise_weights/read:0"> <dim>144</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="256" name="MobilenetV3/expanded_conv_7/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>144</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="257" name="data_add_94159420_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 144, 1, 1" offset="338842" size="288"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="258" name="data_add_94159420" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94159420"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> 
</layer> <layer id="259" name="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="260" name="MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_7/depthwise_output:0"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="261" name="MobilenetV3/expanded_conv_7/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="14, 14" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="262" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="40, 144, 1, 1" offset="339130" size="11520"/> <output> <port id="0" precision="FP16"> <dim>40</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="263" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>40</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/weights/read:0"> <dim>40</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="264" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>40</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" 
names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="265" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 40, 1, 1" offset="350650" size="80"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="266" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="267" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="268" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="269" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="144, 40, 1, 1" offset="350730" size="11520"/> <output> <port id="0" precision="FP16"> <dim>144</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="270" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>144</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/weights/read:0"> <dim>144</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="271" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" 
value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>144</dim> <dim>40</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="272" name="data_add_94229424_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 144, 1, 1" offset="362250" size="288"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="273" name="data_add_94229424" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94229424"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="274" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="275" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="276" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="277" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="278" name="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute 
name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_7/squeeze_excite/excite:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="279" name="MobilenetV3/expanded_conv_7/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/squeeze_excite/mul:0"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="280" name="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="48, 144, 1, 1" offset="362538" size="13824"/> <output> <port id="0" precision="FP16"> <dim>48</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="281" name="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>48</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_7/project/weights/read:0"> <dim>48</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="282" name="MobilenetV3/expanded_conv_7/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>48</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="283" name="data_add_94279432_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 48, 1, 1" offset="376362" size="96"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="284" name="data_add_94279432" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94279432"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>1</dim> 
<dim>1</dim> </port> </output> </layer> <layer id="285" name="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_7/project/Identity:0"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="286" name="MobilenetV3/expanded_conv_7/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_7/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_7/add:0,MobilenetV3/expanded_conv_7/output:0,MobilenetV3/expanded_conv_8/input:0"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="287" name="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="288, 48, 1, 1" offset="376458" size="27648"/> <output> <port id="0" precision="FP16"> <dim>288</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="288" name="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>288</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/expand/weights/read:0"> <dim>288</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="289" name="MobilenetV3/expanded_conv_8/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>48</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>288</dim> <dim>48</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="290" name="data_add_94359440_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 288, 1, 1" offset="404106" size="576"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="291" name="data_add_94359440" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> 
<attribute name="fused_names" version="0" value="data_add_94359440"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="292" name="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="293" name="MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_8/expansion_output:0"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> </output> </layer> <layer id="294" name="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="288, 1, 1, 5, 5" offset="404682" size="14400"/> <output> <port id="0" precision="FP16"> <dim>288</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="295" name="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>288</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/depthwise/depthwise_weights/read:0"> <dim>288</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="296" name="MobilenetV3/expanded_conv_8/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="2, 2" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>14</dim> <dim>14</dim> </port> <port id="1" precision="FP32"> <dim>288</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="297" name="data_add_94439448_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 288, 1, 1" offset="419082" size="576"/> <output> <port id="0" precision="FP16"> <dim>1</dim> 
<dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="298" name="data_add_94439448" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94439448"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="299" name="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="300" name="MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_8/depthwise_output:0"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="301" name="MobilenetV3/expanded_conv_8/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="7, 7" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="302" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="72, 288, 1, 1" offset="419658" size="41472"/> <output> <port id="0" precision="FP16"> <dim>72</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="303" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>72</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/weights/read:0"> <dim>72</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="304" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Conv2D" type="Convolution" 
version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>72</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="305" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 72, 1, 1" offset="461130" size="144"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="306" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="307" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="308" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="309" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="288, 72, 1, 1" offset="461274" size="41472"/> <output> <port id="0" precision="FP16"> <dim>288</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="310" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>288</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" 
precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/weights/read:0"> <dim>288</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="311" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>288</dim> <dim>72</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="312" name="data_add_94509452_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 288, 1, 1" offset="502746" size="576"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="313" name="data_add_94509452" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94509452"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="314" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="315" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="316" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="317" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port 
id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="318" name="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_8/squeeze_excite/excite:0"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="319" name="MobilenetV3/expanded_conv_8/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/squeeze_excite/mul:0"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="320" name="MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 288, 1, 1" offset="503322" size="55296"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="321" name="MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_8/project/weights/read:0"> <dim>96</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="322" name="MobilenetV3/expanded_conv_8/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>288</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>288</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="323" name="data_add_94559460_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="558618" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer 
id="324" name="data_add_94559460" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94559460"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="325" name="MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_8/output:0,MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_8/project/Identity:0,MobilenetV3/expanded_conv_9/input:0"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="326" name="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 96, 1, 1" offset="558810" size="110592"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="327" name="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/expand/weights/read:0"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="328" name="MobilenetV3/expanded_conv_9/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="329" name="data_add_94639468_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="669402" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="330" name="data_add_94639468" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94639468"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> 
</port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="331" name="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="332" name="MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_9/expansion_output:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="333" name="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 1, 1, 5, 5" offset="670554" size="28800"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="334" name="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/depthwise/depthwise_weights/read:0"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="335" name="MobilenetV3/expanded_conv_9/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="336" name="data_add_94719476_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="699354" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="337" name="data_add_94719476" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> 
<attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94719476"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="338" name="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="339" name="MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_9/depthwise_output:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="340" name="MobilenetV3/expanded_conv_9/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="7, 7" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="341" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="144, 576, 1, 1" offset="700506" size="165888"/> <output> <port id="0" precision="FP16"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="342" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/weights/read:0"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="343" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" 
value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="344" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 144, 1, 1" offset="866394" size="288"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="345" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="346" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="347" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="348" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 144, 1, 1" offset="866682" size="165888"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="349" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/weights/read:0"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> 
</port> </output> </layer> <layer id="350" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="351" name="data_add_94789480_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="1032570" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="352" name="data_add_94789480" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94789480"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="353" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="354" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="355" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="356" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" 
names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="357" name="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_9/squeeze_excite/excite:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="358" name="MobilenetV3/expanded_conv_9/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/squeeze_excite/mul:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="359" name="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 576, 1, 1" offset="1033722" size="110592"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="360" name="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_9/project/weights/read:0"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="361" name="MobilenetV3/expanded_conv_9/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="362" name="data_add_94839488_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="1144314" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="363" name="data_add_94839488" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute 
name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94839488"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="364" name="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_9/project/Identity:0"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="365" name="MobilenetV3/expanded_conv_9/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_9/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/input:0,MobilenetV3/expanded_conv_9/add:0,MobilenetV3/expanded_conv_9/output:0"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="366" name="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 96, 1, 1" offset="1144506" size="110592"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="367" name="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/expand/weights/read:0"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="368" name="MobilenetV3/expanded_conv_10/expand/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/expand/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="369" name="data_add_94919496_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="1255098" 
size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="370" name="data_add_94919496" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94919496"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="371" name="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="372" name="MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1:0,MobilenetV3/expanded_conv_10/expansion_output:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="373" name="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 1, 1, 5, 5" offset="1256250" size="28800"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="374" name="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/depthwise/depthwise_weights/read:0"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </output> </layer> <layer id="375" name="MobilenetV3/expanded_conv_10/depthwise/depthwise" type="GroupConvolution" version="opset1"> <data strides="1, 1" pads_begin="0, 0" pads_end="0, 0" dilations="1, 1" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/depthwise/depthwise"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>1</dim> <dim>1</dim> <dim>5</dim> <dim>5</dim> </port> </input> <output> <port id="2" precision="FP32"> 
<dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="376" name="data_add_94999504_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="1285050" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="377" name="data_add_94999504" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_94999504"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="378" name="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/depthwise/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="379" name="MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1:0,MobilenetV3/expanded_conv_10/depthwise_output:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="380" name="MobilenetV3/expanded_conv_10/squeeze_excite/AvgPool" type="AvgPool" version="opset1"> <data kernel="7, 7" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/AvgPool:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="381" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="144, 576, 1, 1" offset="1286202" size="165888"/> <output> <port id="0" precision="FP16"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="382" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> 
</port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights/read:0"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="383" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>144</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Conv2D:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="384" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 144, 1, 1" offset="1452090" size="288"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="385" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases/read:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="386" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/BiasAdd:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="387" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Relu" type="ReLU" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Relu"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Relu:0"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="388" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 144, 1, 1" offset="1452378" size="165888"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="389" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights/read" type="Convert" version="opset1"> <data 
destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights/read:0"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="390" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>576</dim> <dim>144</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Conv2D:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="391" name="data_add_95069508_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="1618266" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="392" name="data_add_95069508" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_95069508"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="393" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/BiasAdd/Add/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="394" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Relu6" type="Clamp" version="opset1"> <data min="0" max="6"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Relu6"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Relu6:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="395" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1, 1, 1" offset="1816" size="2"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> 
</port> </output> </layer> <layer id="396" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y:0"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="397" name="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul:0,MobilenetV3/expanded_conv_10/squeeze_excite/excite:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="398" name="MobilenetV3/expanded_conv_10/squeeze_excite/mul" type="Multiply" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/squeeze_excite/mul"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/squeeze_excite/mul:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="399" name="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="96, 576, 1, 1" offset="1619418" size="110592"/> <output> <port id="0" precision="FP16"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="400" name="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/expanded_conv_10/project/weights/read:0"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="401" name="MobilenetV3/expanded_conv_10/project/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/project/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>96</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" 
precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="402" name="data_add_95119516_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 96, 1, 1" offset="1730010" size="192"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="403" name="data_add_95119516" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_95119516"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="404" name="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm:0,MobilenetV3/expanded_conv_10/project/Identity:0"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="405" name="MobilenetV3/expanded_conv_10/add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/expanded_conv_10/add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/expanded_conv_10/add:0,MobilenetV3/expanded_conv_10/output:0"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="406" name="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy_compressed" type="Const" version="opset1"> <data element_type="f16" shape="576, 96, 1, 1" offset="1730202" size="110592"/> <output> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="407" name="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm/mean/Fused_Mul__copy"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv_1/weights/read:0"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="408" name="MobilenetV3/Conv_1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>96</dim> <dim>7</dim> <dim>7</dim> 
</port> <port id="1" precision="FP32"> <dim>576</dim> <dim>96</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="409" name="data_add_95199524_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 576, 1, 1" offset="1840794" size="1152"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="410" name="data_add_95199524" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="data_add_95199524"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="411" name="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm/variance/Fused_Add_" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm/variance/Fused_Add_"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Conv_1/BatchNorm/FusedBatchNorm:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="412" name="MobilenetV3/Conv_1/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_1/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv_1/hard_swish/mul_1:0"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </output> </layer> <layer id="413" name="MobilenetV3/AvgPool2D/AvgPool" type="AvgPool" version="opset1"> <data kernel="7, 7" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/AvgPool2D/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>7</dim> <dim>7</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/AvgPool2D/AvgPool:0"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="414" name="MobilenetV3/Conv_2/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1024, 576, 1, 1" offset="1841946" size="1179648"/> <output> <port id="0" precision="FP16"> <dim>1024</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="415" name="MobilenetV3/Conv_2/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_2/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1024</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv_2/weights/read:0"> <dim>1024</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> 
</output> </layer> <layer id="416" name="MobilenetV3/Conv_2/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_2/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1024</dim> <dim>576</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Conv_2/Conv2D:0"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="417" name="MobilenetV3/Conv_2/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1024, 1, 1" offset="3021594" size="2048"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="418" name="MobilenetV3/Conv_2/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_2/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv_2/biases/read:0"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="419" name="MobilenetV3/Conv_2/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_2/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Conv_2/BiasAdd:0"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="420" name="MobilenetV3/Conv_2/hard_swish/mul_1" type="HSwish" version="opset4"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Conv_2/hard_swish/mul_1"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Conv_2/hard_swish/mul_1:0"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="421" name="MobilenetV3/Logits/AvgPool" type="AvgPool" version="opset1"> <data kernel="1, 1" strides="1, 1" pads_begin="0, 0" pads_end="0, 0" exclude-pad="true" auto_pad="valid" rounding_type="floor"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/AvgPool"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Logits/AvgPool:0,MobilenetV3/Logits/Dropout_1b/Identity:0"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="422" name="MobilenetV3/Logits/Conv2d_1c_1x1/weights/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1001, 1024, 1, 1" offset="3023642" size="2050048"/> <output> <port id="0" precision="FP16"> <dim>1001</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="423" 
name="MobilenetV3/Logits/Conv2d_1c_1x1/weights/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/weights/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1001</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Logits/Conv2d_1c_1x1/weights/read:0"> <dim>1001</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="424" name="MobilenetV3/Logits/Conv2d_1c_1x1/Conv2D" type="Convolution" version="opset1"> <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="same_upper"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/Conv2D"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1001</dim> <dim>1024</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Logits/Conv2d_1c_1x1/Conv2D:0"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="425" name="MobilenetV3/Logits/Conv2d_1c_1x1/biases/read_compressed" type="Const" version="opset1"> <data element_type="f16" shape="1, 1001, 1, 1" offset="5073690" size="2002"/> <output> <port id="0" precision="FP16"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="426" name="MobilenetV3/Logits/Conv2d_1c_1x1/biases/read" type="Convert" version="opset1"> <data destination_type="f32"/> <rt_info> <attribute name="decompression" version="0"/> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/biases/read"/> </rt_info> <input> <port id="0" precision="FP16"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Logits/Conv2d_1c_1x1/biases/read:0"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="427" name="MobilenetV3/Logits/Conv2d_1c_1x1/BiasAdd/Add" type="Add" version="opset1"> <data auto_broadcast="numpy"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/BiasAdd/Add"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="FP32"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </input> <output> <port id="2" precision="FP32"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> </output> </layer> <layer id="428" name="Constant_1268" type="Const" version="opset1"> <data element_type="i64" shape="2" offset="5075692" size="16"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/BiasAdd/Add/Transpose, MobilenetV3/Logits/SpatialSqueeze"/> </rt_info> <output> <port id="0" precision="I64"> <dim>2</dim> </port> </output> </layer> <layer id="429" name="Squeeze_1269" type="Squeeze" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Logits/Conv2d_1c_1x1/BiasAdd/Add/Transpose, MobilenetV3/Logits/SpatialSqueeze"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1001</dim> <dim>1</dim> <dim>1</dim> </port> <port id="1" precision="I64"> <dim>2</dim> </port> </input> <output> <port id="2" precision="FP32" 
names="MobilenetV3/Logits/SpatialSqueeze:0,MobilenetV3/Logits/result:0"> <dim>1</dim> <dim>1001</dim> </port> </output> </layer> <layer id="430" name="MobilenetV3/Predictions/Reshape/shape" type="Const" version="opset1"> <data element_type="i64" shape="2" offset="5075708" size="16"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Predictions/Reshape/shape"/> </rt_info> <output> <port id="0" precision="I64" names="MobilenetV3/Predictions/Reshape/shape:0"> <dim>2</dim> </port> </output> </layer> <layer id="431" name="MobilenetV3/Predictions/Reshape" type="Reshape" version="opset1"> <data special_zero="false"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Predictions/Reshape"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1001</dim> </port> <port id="1" precision="I64"> <dim>2</dim> </port> </input> <output> <port id="2" precision="FP32" names="MobilenetV3/Predictions/Reshape:0"> <dim>1</dim> <dim>1001</dim> </port> </output> </layer> <layer id="432" name="MobilenetV3/Predictions/Softmax" type="SoftMax" version="opset8"> <data axis="1"/> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Predictions/Softmax"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1001</dim> </port> </input> <output> <port id="1" precision="FP32" names="MobilenetV3/Predictions/Softmax:0"> <dim>1</dim> <dim>1001</dim> </port> </output> </layer> <layer id="433" name="MobilenetV3/Predictions/Softmax:0" type="Result" version="opset1"> <rt_info> <attribute name="fused_names" version="0" value="MobilenetV3/Predictions/Softmax:0"/> </rt_info> <input> <port id="0" precision="FP32"> <dim>1</dim> <dim>1001</dim> </port> </input> </layer> </layers> <edges> <edge from-layer="0" from-port="0" to-layer="2" to-port="0"/> <edge from-layer="1" from-port="0" to-layer="2" to-port="1"/> <edge from-layer="2" from-port="2" to-layer="5" to-port="0"/> <edge from-layer="3" from-port="0" to-layer="4" to-port="0"/> <edge from-layer="4" from-port="1" to-layer="5" to-port="1"/> <edge from-layer="5" from-port="2" to-layer="8" to-port="0"/> <edge from-layer="6" from-port="0" to-layer="7" to-port="0"/> <edge from-layer="7" from-port="1" to-layer="8" to-port="1"/> <edge from-layer="8" from-port="2" to-layer="11" to-port="0"/> <edge from-layer="9" from-port="0" to-layer="10" to-port="0"/> <edge from-layer="10" from-port="1" to-layer="11" to-port="1"/> <edge from-layer="11" from-port="2" to-layer="14" to-port="0"/> <edge from-layer="12" from-port="0" to-layer="13" to-port="0"/> <edge from-layer="13" from-port="1" to-layer="14" to-port="1"/> <edge from-layer="14" from-port="2" to-layer="15" to-port="0"/> <edge from-layer="15" from-port="1" to-layer="18" to-port="0"/> <edge from-layer="16" from-port="0" to-layer="17" to-port="0"/> <edge from-layer="17" from-port="1" to-layer="18" to-port="1"/> <edge from-layer="18" from-port="2" to-layer="21" to-port="0"/> <edge from-layer="19" from-port="0" to-layer="20" to-port="0"/> <edge from-layer="20" from-port="1" to-layer="21" to-port="1"/> <edge from-layer="21" from-port="2" to-layer="22" to-port="0"/> <edge from-layer="22" from-port="1" to-layer="23" to-port="0"/> <edge from-layer="22" from-port="1" to-layer="41" to-port="0"/> <edge from-layer="23" from-port="1" to-layer="26" to-port="0"/> <edge from-layer="24" from-port="0" to-layer="25" to-port="0"/> <edge from-layer="25" from-port="1" to-layer="26" to-port="1"/> <edge from-layer="26" from-port="2" to-layer="29" to-port="0"/> <edge 
from-layer="27" from-port="0" to-layer="28" to-port="0"/> <edge from-layer="28" from-port="1" to-layer="29" to-port="1"/> <edge from-layer="29" from-port="2" to-layer="30" to-port="0"/> <edge from-layer="30" from-port="1" to-layer="33" to-port="0"/> <edge from-layer="31" from-port="0" to-layer="32" to-port="0"/> <edge from-layer="32" from-port="1" to-layer="33" to-port="1"/> <edge from-layer="33" from-port="2" to-layer="36" to-port="0"/> <edge from-layer="34" from-port="0" to-layer="35" to-port="0"/> <edge from-layer="35" from-port="1" to-layer="36" to-port="1"/> <edge from-layer="36" from-port="2" to-layer="37" to-port="0"/> <edge from-layer="37" from-port="1" to-layer="40" to-port="0"/> <edge from-layer="38" from-port="0" to-layer="39" to-port="0"/> <edge from-layer="39" from-port="1" to-layer="40" to-port="1"/> <edge from-layer="40" from-port="2" to-layer="41" to-port="1"/> <edge from-layer="41" from-port="2" to-layer="44" to-port="0"/> <edge from-layer="42" from-port="0" to-layer="43" to-port="0"/> <edge from-layer="43" from-port="1" to-layer="44" to-port="1"/> <edge from-layer="44" from-port="2" to-layer="47" to-port="0"/> <edge from-layer="45" from-port="0" to-layer="46" to-port="0"/> <edge from-layer="46" from-port="1" to-layer="47" to-port="1"/> <edge from-layer="47" from-port="2" to-layer="50" to-port="0"/> <edge from-layer="48" from-port="0" to-layer="49" to-port="0"/> <edge from-layer="49" from-port="1" to-layer="50" to-port="1"/> <edge from-layer="50" from-port="2" to-layer="53" to-port="0"/> <edge from-layer="51" from-port="0" to-layer="52" to-port="0"/> <edge from-layer="52" from-port="1" to-layer="53" to-port="1"/> <edge from-layer="53" from-port="2" to-layer="54" to-port="0"/> <edge from-layer="54" from-port="1" to-layer="57" to-port="0"/> <edge from-layer="55" from-port="0" to-layer="56" to-port="0"/> <edge from-layer="56" from-port="1" to-layer="57" to-port="1"/> <edge from-layer="57" from-port="2" to-layer="60" to-port="0"/> <edge from-layer="58" from-port="0" to-layer="59" to-port="0"/> <edge from-layer="59" from-port="1" to-layer="60" to-port="1"/> <edge from-layer="60" from-port="2" to-layer="61" to-port="0"/> <edge from-layer="61" from-port="1" to-layer="64" to-port="0"/> <edge from-layer="62" from-port="0" to-layer="63" to-port="0"/> <edge from-layer="63" from-port="1" to-layer="64" to-port="1"/> <edge from-layer="64" from-port="2" to-layer="67" to-port="0"/> <edge from-layer="65" from-port="0" to-layer="66" to-port="0"/> <edge from-layer="66" from-port="1" to-layer="67" to-port="1"/> <edge from-layer="67" from-port="2" to-layer="70" to-port="0"/> <edge from-layer="67" from-port="2" to-layer="88" to-port="1"/> <edge from-layer="68" from-port="0" to-layer="69" to-port="0"/> <edge from-layer="69" from-port="1" to-layer="70" to-port="1"/> <edge from-layer="70" from-port="2" to-layer="73" to-port="0"/> <edge from-layer="71" from-port="0" to-layer="72" to-port="0"/> <edge from-layer="72" from-port="1" to-layer="73" to-port="1"/> <edge from-layer="73" from-port="2" to-layer="74" to-port="0"/> <edge from-layer="74" from-port="1" to-layer="77" to-port="0"/> <edge from-layer="75" from-port="0" to-layer="76" to-port="0"/> <edge from-layer="76" from-port="1" to-layer="77" to-port="1"/> <edge from-layer="77" from-port="2" to-layer="80" to-port="0"/> <edge from-layer="78" from-port="0" to-layer="79" to-port="0"/> <edge from-layer="79" from-port="1" to-layer="80" to-port="1"/> <edge from-layer="80" from-port="2" to-layer="81" to-port="0"/> <edge from-layer="81" from-port="1" 
to-layer="84" to-port="0"/> <edge from-layer="82" from-port="0" to-layer="83" to-port="0"/> <edge from-layer="83" from-port="1" to-layer="84" to-port="1"/> <edge from-layer="84" from-port="2" to-layer="87" to-port="0"/> <edge from-layer="85" from-port="0" to-layer="86" to-port="0"/> <edge from-layer="86" from-port="1" to-layer="87" to-port="1"/> <edge from-layer="87" from-port="2" to-layer="88" to-port="0"/> <edge from-layer="88" from-port="2" to-layer="91" to-port="0"/> <edge from-layer="89" from-port="0" to-layer="90" to-port="0"/> <edge from-layer="90" from-port="1" to-layer="91" to-port="1"/> <edge from-layer="91" from-port="2" to-layer="94" to-port="0"/> <edge from-layer="92" from-port="0" to-layer="93" to-port="0"/> <edge from-layer="93" from-port="1" to-layer="94" to-port="1"/> <edge from-layer="94" from-port="2" to-layer="95" to-port="0"/> <edge from-layer="95" from-port="1" to-layer="98" to-port="0"/> <edge from-layer="96" from-port="0" to-layer="97" to-port="0"/> <edge from-layer="97" from-port="1" to-layer="98" to-port="1"/> <edge from-layer="98" from-port="2" to-layer="101" to-port="0"/> <edge from-layer="99" from-port="0" to-layer="100" to-port="0"/> <edge from-layer="100" from-port="1" to-layer="101" to-port="1"/> <edge from-layer="101" from-port="2" to-layer="102" to-port="0"/> <edge from-layer="102" from-port="1" to-layer="103" to-port="0"/> <edge from-layer="102" from-port="1" to-layer="121" to-port="0"/> <edge from-layer="103" from-port="1" to-layer="106" to-port="0"/> <edge from-layer="104" from-port="0" to-layer="105" to-port="0"/> <edge from-layer="105" from-port="1" to-layer="106" to-port="1"/> <edge from-layer="106" from-port="2" to-layer="109" to-port="0"/> <edge from-layer="107" from-port="0" to-layer="108" to-port="0"/> <edge from-layer="108" from-port="1" to-layer="109" to-port="1"/> <edge from-layer="109" from-port="2" to-layer="110" to-port="0"/> <edge from-layer="110" from-port="1" to-layer="113" to-port="0"/> <edge from-layer="111" from-port="0" to-layer="112" to-port="0"/> <edge from-layer="112" from-port="1" to-layer="113" to-port="1"/> <edge from-layer="113" from-port="2" to-layer="116" to-port="0"/> <edge from-layer="114" from-port="0" to-layer="115" to-port="0"/> <edge from-layer="115" from-port="1" to-layer="116" to-port="1"/> <edge from-layer="116" from-port="2" to-layer="117" to-port="0"/> <edge from-layer="117" from-port="1" to-layer="120" to-port="0"/> <edge from-layer="118" from-port="0" to-layer="119" to-port="0"/> <edge from-layer="119" from-port="1" to-layer="120" to-port="1"/> <edge from-layer="120" from-port="2" to-layer="121" to-port="1"/> <edge from-layer="121" from-port="2" to-layer="124" to-port="0"/> <edge from-layer="122" from-port="0" to-layer="123" to-port="0"/> <edge from-layer="123" from-port="1" to-layer="124" to-port="1"/> <edge from-layer="124" from-port="2" to-layer="127" to-port="0"/> <edge from-layer="125" from-port="0" to-layer="126" to-port="0"/> <edge from-layer="126" from-port="1" to-layer="127" to-port="1"/> <edge from-layer="127" from-port="2" to-layer="130" to-port="0"/> <edge from-layer="127" from-port="2" to-layer="167" to-port="1"/> <edge from-layer="128" from-port="0" to-layer="129" to-port="0"/> <edge from-layer="129" from-port="1" to-layer="130" to-port="1"/> <edge from-layer="130" from-port="2" to-layer="133" to-port="0"/> <edge from-layer="131" from-port="0" to-layer="132" to-port="0"/> <edge from-layer="132" from-port="1" to-layer="133" to-port="1"/> <edge from-layer="133" from-port="2" to-layer="134" 
to-port="0"/> <edge from-layer="134" from-port="1" to-layer="137" to-port="0"/> <edge from-layer="135" from-port="0" to-layer="136" to-port="0"/> <edge from-layer="136" from-port="1" to-layer="137" to-port="1"/> <edge from-layer="137" from-port="2" to-layer="140" to-port="0"/> <edge from-layer="138" from-port="0" to-layer="139" to-port="0"/> <edge from-layer="139" from-port="1" to-layer="140" to-port="1"/> <edge from-layer="140" from-port="2" to-layer="141" to-port="0"/> <edge from-layer="141" from-port="1" to-layer="142" to-port="0"/> <edge from-layer="141" from-port="1" to-layer="160" to-port="0"/> <edge from-layer="142" from-port="1" to-layer="145" to-port="0"/> <edge from-layer="143" from-port="0" to-layer="144" to-port="0"/> <edge from-layer="144" from-port="1" to-layer="145" to-port="1"/> <edge from-layer="145" from-port="2" to-layer="148" to-port="0"/> <edge from-layer="146" from-port="0" to-layer="147" to-port="0"/> <edge from-layer="147" from-port="1" to-layer="148" to-port="1"/> <edge from-layer="148" from-port="2" to-layer="149" to-port="0"/> <edge from-layer="149" from-port="1" to-layer="152" to-port="0"/> <edge from-layer="150" from-port="0" to-layer="151" to-port="0"/> <edge from-layer="151" from-port="1" to-layer="152" to-port="1"/> <edge from-layer="152" from-port="2" to-layer="155" to-port="0"/> <edge from-layer="153" from-port="0" to-layer="154" to-port="0"/> <edge from-layer="154" from-port="1" to-layer="155" to-port="1"/> <edge from-layer="155" from-port="2" to-layer="156" to-port="0"/> <edge from-layer="156" from-port="1" to-layer="159" to-port="0"/> <edge from-layer="157" from-port="0" to-layer="158" to-port="0"/> <edge from-layer="158" from-port="1" to-layer="159" to-port="1"/> <edge from-layer="159" from-port="2" to-layer="160" to-port="1"/> <edge from-layer="160" from-port="2" to-layer="163" to-port="0"/> <edge from-layer="161" from-port="0" to-layer="162" to-port="0"/> <edge from-layer="162" from-port="1" to-layer="163" to-port="1"/> <edge from-layer="163" from-port="2" to-layer="166" to-port="0"/> <edge from-layer="164" from-port="0" to-layer="165" to-port="0"/> <edge from-layer="165" from-port="1" to-layer="166" to-port="1"/> <edge from-layer="166" from-port="2" to-layer="167" to-port="0"/> <edge from-layer="167" from-port="2" to-layer="170" to-port="0"/> <edge from-layer="167" from-port="2" to-layer="207" to-port="1"/> <edge from-layer="168" from-port="0" to-layer="169" to-port="0"/> <edge from-layer="169" from-port="1" to-layer="170" to-port="1"/> <edge from-layer="170" from-port="2" to-layer="173" to-port="0"/> <edge from-layer="171" from-port="0" to-layer="172" to-port="0"/> <edge from-layer="172" from-port="1" to-layer="173" to-port="1"/> <edge from-layer="173" from-port="2" to-layer="174" to-port="0"/> <edge from-layer="174" from-port="1" to-layer="177" to-port="0"/> <edge from-layer="175" from-port="0" to-layer="176" to-port="0"/> <edge from-layer="176" from-port="1" to-layer="177" to-port="1"/> <edge from-layer="177" from-port="2" to-layer="180" to-port="0"/> <edge from-layer="178" from-port="0" to-layer="179" to-port="0"/> <edge from-layer="179" from-port="1" to-layer="180" to-port="1"/> <edge from-layer="180" from-port="2" to-layer="181" to-port="0"/> <edge from-layer="181" from-port="1" to-layer="182" to-port="0"/> <edge from-layer="181" from-port="1" to-layer="200" to-port="0"/> <edge from-layer="182" from-port="1" to-layer="185" to-port="0"/> <edge from-layer="183" from-port="0" to-layer="184" to-port="0"/> <edge from-layer="184" from-port="1" 
to-layer="185" to-port="1"/> <edge from-layer="185" from-port="2" to-layer="188" to-port="0"/> <edge from-layer="186" from-port="0" to-layer="187" to-port="0"/> <edge from-layer="187" from-port="1" to-layer="188" to-port="1"/> <edge from-layer="188" from-port="2" to-layer="189" to-port="0"/> <edge from-layer="189" from-port="1" to-layer="192" to-port="0"/> <edge from-layer="190" from-port="0" to-layer="191" to-port="0"/> <edge from-layer="191" from-port="1" to-layer="192" to-port="1"/> <edge from-layer="192" from-port="2" to-layer="195" to-port="0"/> <edge from-layer="193" from-port="0" to-layer="194" to-port="0"/> <edge from-layer="194" from-port="1" to-layer="195" to-port="1"/> <edge from-layer="195" from-port="2" to-layer="196" to-port="0"/> <edge from-layer="196" from-port="1" to-layer="199" to-port="0"/> <edge from-layer="197" from-port="0" to-layer="198" to-port="0"/> <edge from-layer="198" from-port="1" to-layer="199" to-port="1"/> <edge from-layer="199" from-port="2" to-layer="200" to-port="1"/> <edge from-layer="200" from-port="2" to-layer="203" to-port="0"/> <edge from-layer="201" from-port="0" to-layer="202" to-port="0"/> <edge from-layer="202" from-port="1" to-layer="203" to-port="1"/> <edge from-layer="203" from-port="2" to-layer="206" to-port="0"/> <edge from-layer="204" from-port="0" to-layer="205" to-port="0"/> <edge from-layer="205" from-port="1" to-layer="206" to-port="1"/> <edge from-layer="206" from-port="2" to-layer="207" to-port="0"/> <edge from-layer="207" from-port="2" to-layer="210" to-port="0"/> <edge from-layer="208" from-port="0" to-layer="209" to-port="0"/> <edge from-layer="209" from-port="1" to-layer="210" to-port="1"/> <edge from-layer="210" from-port="2" to-layer="213" to-port="0"/> <edge from-layer="211" from-port="0" to-layer="212" to-port="0"/> <edge from-layer="212" from-port="1" to-layer="213" to-port="1"/> <edge from-layer="213" from-port="2" to-layer="214" to-port="0"/> <edge from-layer="214" from-port="1" to-layer="217" to-port="0"/> <edge from-layer="215" from-port="0" to-layer="216" to-port="0"/> <edge from-layer="216" from-port="1" to-layer="217" to-port="1"/> <edge from-layer="217" from-port="2" to-layer="220" to-port="0"/> <edge from-layer="218" from-port="0" to-layer="219" to-port="0"/> <edge from-layer="219" from-port="1" to-layer="220" to-port="1"/> <edge from-layer="220" from-port="2" to-layer="221" to-port="0"/> <edge from-layer="221" from-port="1" to-layer="222" to-port="0"/> <edge from-layer="221" from-port="1" to-layer="240" to-port="0"/> <edge from-layer="222" from-port="1" to-layer="225" to-port="0"/> <edge from-layer="223" from-port="0" to-layer="224" to-port="0"/> <edge from-layer="224" from-port="1" to-layer="225" to-port="1"/> <edge from-layer="225" from-port="2" to-layer="228" to-port="0"/> <edge from-layer="226" from-port="0" to-layer="227" to-port="0"/> <edge from-layer="227" from-port="1" to-layer="228" to-port="1"/> <edge from-layer="228" from-port="2" to-layer="229" to-port="0"/> <edge from-layer="229" from-port="1" to-layer="232" to-port="0"/> <edge from-layer="230" from-port="0" to-layer="231" to-port="0"/> <edge from-layer="231" from-port="1" to-layer="232" to-port="1"/> <edge from-layer="232" from-port="2" to-layer="235" to-port="0"/> <edge from-layer="233" from-port="0" to-layer="234" to-port="0"/> <edge from-layer="234" from-port="1" to-layer="235" to-port="1"/> <edge from-layer="235" from-port="2" to-layer="236" to-port="0"/> <edge from-layer="236" from-port="1" to-layer="239" to-port="0"/> <edge from-layer="237" 
from-port="0" to-layer="238" to-port="0"/> <edge from-layer="238" from-port="1" to-layer="239" to-port="1"/> <edge from-layer="239" from-port="2" to-layer="240" to-port="1"/> <edge from-layer="240" from-port="2" to-layer="243" to-port="0"/> <edge from-layer="241" from-port="0" to-layer="242" to-port="0"/> <edge from-layer="242" from-port="1" to-layer="243" to-port="1"/> <edge from-layer="243" from-port="2" to-layer="246" to-port="0"/> <edge from-layer="244" from-port="0" to-layer="245" to-port="0"/> <edge from-layer="245" from-port="1" to-layer="246" to-port="1"/> <edge from-layer="246" from-port="2" to-layer="249" to-port="0"/> <edge from-layer="246" from-port="2" to-layer="286" to-port="1"/> <edge from-layer="247" from-port="0" to-layer="248" to-port="0"/> <edge from-layer="248" from-port="1" to-layer="249" to-port="1"/> <edge from-layer="249" from-port="2" to-layer="252" to-port="0"/> <edge from-layer="250" from-port="0" to-layer="251" to-port="0"/> <edge from-layer="251" from-port="1" to-layer="252" to-port="1"/> <edge from-layer="252" from-port="2" to-layer="253" to-port="0"/> <edge from-layer="253" from-port="1" to-layer="256" to-port="0"/> <edge from-layer="254" from-port="0" to-layer="255" to-port="0"/> <edge from-layer="255" from-port="1" to-layer="256" to-port="1"/> <edge from-layer="256" from-port="2" to-layer="259" to-port="0"/> <edge from-layer="257" from-port="0" to-layer="258" to-port="0"/> <edge from-layer="258" from-port="1" to-layer="259" to-port="1"/> <edge from-layer="259" from-port="2" to-layer="260" to-port="0"/> <edge from-layer="260" from-port="1" to-layer="261" to-port="0"/> <edge from-layer="260" from-port="1" to-layer="279" to-port="0"/> <edge from-layer="261" from-port="1" to-layer="264" to-port="0"/> <edge from-layer="262" from-port="0" to-layer="263" to-port="0"/> <edge from-layer="263" from-port="1" to-layer="264" to-port="1"/> <edge from-layer="264" from-port="2" to-layer="267" to-port="0"/> <edge from-layer="265" from-port="0" to-layer="266" to-port="0"/> <edge from-layer="266" from-port="1" to-layer="267" to-port="1"/> <edge from-layer="267" from-port="2" to-layer="268" to-port="0"/> <edge from-layer="268" from-port="1" to-layer="271" to-port="0"/> <edge from-layer="269" from-port="0" to-layer="270" to-port="0"/> <edge from-layer="270" from-port="1" to-layer="271" to-port="1"/> <edge from-layer="271" from-port="2" to-layer="274" to-port="0"/> <edge from-layer="272" from-port="0" to-layer="273" to-port="0"/> <edge from-layer="273" from-port="1" to-layer="274" to-port="1"/> <edge from-layer="274" from-port="2" to-layer="275" to-port="0"/> <edge from-layer="275" from-port="1" to-layer="278" to-port="0"/> <edge from-layer="276" from-port="0" to-layer="277" to-port="0"/> <edge from-layer="277" from-port="1" to-layer="278" to-port="1"/> <edge from-layer="278" from-port="2" to-layer="279" to-port="1"/> <edge from-layer="279" from-port="2" to-layer="282" to-port="0"/> <edge from-layer="280" from-port="0" to-layer="281" to-port="0"/> <edge from-layer="281" from-port="1" to-layer="282" to-port="1"/> <edge from-layer="282" from-port="2" to-layer="285" to-port="0"/> <edge from-layer="283" from-port="0" to-layer="284" to-port="0"/> <edge from-layer="284" from-port="1" to-layer="285" to-port="1"/> <edge from-layer="285" from-port="2" to-layer="286" to-port="0"/> <edge from-layer="286" from-port="2" to-layer="289" to-port="0"/> <edge from-layer="287" from-port="0" to-layer="288" to-port="0"/> <edge from-layer="288" from-port="1" to-layer="289" to-port="1"/> <edge 
from-layer="289" from-port="2" to-layer="292" to-port="0"/> <edge from-layer="290" from-port="0" to-layer="291" to-port="0"/> <edge from-layer="291" from-port="1" to-layer="292" to-port="1"/> <edge from-layer="292" from-port="2" to-layer="293" to-port="0"/> <edge from-layer="293" from-port="1" to-layer="296" to-port="0"/> <edge from-layer="294" from-port="0" to-layer="295" to-port="0"/> <edge from-layer="295" from-port="1" to-layer="296" to-port="1"/> <edge from-layer="296" from-port="2" to-layer="299" to-port="0"/> <edge from-layer="297" from-port="0" to-layer="298" to-port="0"/> <edge from-layer="298" from-port="1" to-layer="299" to-port="1"/> <edge from-layer="299" from-port="2" to-layer="300" to-port="0"/> <edge from-layer="300" from-port="1" to-layer="301" to-port="0"/> <edge from-layer="300" from-port="1" to-layer="319" to-port="0"/> <edge from-layer="301" from-port="1" to-layer="304" to-port="0"/> <edge from-layer="302" from-port="0" to-layer="303" to-port="0"/> <edge from-layer="303" from-port="1" to-layer="304" to-port="1"/> <edge from-layer="304" from-port="2" to-layer="307" to-port="0"/> <edge from-layer="305" from-port="0" to-layer="306" to-port="0"/> <edge from-layer="306" from-port="1" to-layer="307" to-port="1"/> <edge from-layer="307" from-port="2" to-layer="308" to-port="0"/> <edge from-layer="308" from-port="1" to-layer="311" to-port="0"/> <edge from-layer="309" from-port="0" to-layer="310" to-port="0"/> <edge from-layer="310" from-port="1" to-layer="311" to-port="1"/> <edge from-layer="311" from-port="2" to-layer="314" to-port="0"/> <edge from-layer="312" from-port="0" to-layer="313" to-port="0"/> <edge from-layer="313" from-port="1" to-layer="314" to-port="1"/> <edge from-layer="314" from-port="2" to-layer="315" to-port="0"/> <edge from-layer="315" from-port="1" to-layer="318" to-port="0"/> <edge from-layer="316" from-port="0" to-layer="317" to-port="0"/> <edge from-layer="317" from-port="1" to-layer="318" to-port="1"/> <edge from-layer="318" from-port="2" to-layer="319" to-port="1"/> <edge from-layer="319" from-port="2" to-layer="322" to-port="0"/> <edge from-layer="320" from-port="0" to-layer="321" to-port="0"/> <edge from-layer="321" from-port="1" to-layer="322" to-port="1"/> <edge from-layer="322" from-port="2" to-layer="325" to-port="0"/> <edge from-layer="323" from-port="0" to-layer="324" to-port="0"/> <edge from-layer="324" from-port="1" to-layer="325" to-port="1"/> <edge from-layer="325" from-port="2" to-layer="365" to-port="1"/> <edge from-layer="325" from-port="2" to-layer="328" to-port="0"/> <edge from-layer="326" from-port="0" to-layer="327" to-port="0"/> <edge from-layer="327" from-port="1" to-layer="328" to-port="1"/> <edge from-layer="328" from-port="2" to-layer="331" to-port="0"/> <edge from-layer="329" from-port="0" to-layer="330" to-port="0"/> <edge from-layer="330" from-port="1" to-layer="331" to-port="1"/> <edge from-layer="331" from-port="2" to-layer="332" to-port="0"/> <edge from-layer="332" from-port="1" to-layer="335" to-port="0"/> <edge from-layer="333" from-port="0" to-layer="334" to-port="0"/> <edge from-layer="334" from-port="1" to-layer="335" to-port="1"/> <edge from-layer="335" from-port="2" to-layer="338" to-port="0"/> <edge from-layer="336" from-port="0" to-layer="337" to-port="0"/> <edge from-layer="337" from-port="1" to-layer="338" to-port="1"/> <edge from-layer="338" from-port="2" to-layer="339" to-port="0"/> <edge from-layer="339" from-port="1" to-layer="340" to-port="0"/> <edge from-layer="339" from-port="1" to-layer="358" 
to-port="0"/> <edge from-layer="340" from-port="1" to-layer="343" to-port="0"/> <edge from-layer="341" from-port="0" to-layer="342" to-port="0"/> <edge from-layer="342" from-port="1" to-layer="343" to-port="1"/> <edge from-layer="343" from-port="2" to-layer="346" to-port="0"/> <edge from-layer="344" from-port="0" to-layer="345" to-port="0"/> <edge from-layer="345" from-port="1" to-layer="346" to-port="1"/> <edge from-layer="346" from-port="2" to-layer="347" to-port="0"/> <edge from-layer="347" from-port="1" to-layer="350" to-port="0"/> <edge from-layer="348" from-port="0" to-layer="349" to-port="0"/> <edge from-layer="349" from-port="1" to-layer="350" to-port="1"/> <edge from-layer="350" from-port="2" to-layer="353" to-port="0"/> <edge from-layer="351" from-port="0" to-layer="352" to-port="0"/> <edge from-layer="352" from-port="1" to-layer="353" to-port="1"/> <edge from-layer="353" from-port="2" to-layer="354" to-port="0"/> <edge from-layer="354" from-port="1" to-layer="357" to-port="0"/> <edge from-layer="355" from-port="0" to-layer="356" to-port="0"/> <edge from-layer="356" from-port="1" to-layer="357" to-port="1"/> <edge from-layer="357" from-port="2" to-layer="358" to-port="1"/> <edge from-layer="358" from-port="2" to-layer="361" to-port="0"/> <edge from-layer="359" from-port="0" to-layer="360" to-port="0"/> <edge from-layer="360" from-port="1" to-layer="361" to-port="1"/> <edge from-layer="361" from-port="2" to-layer="364" to-port="0"/> <edge from-layer="362" from-port="0" to-layer="363" to-port="0"/> <edge from-layer="363" from-port="1" to-layer="364" to-port="1"/> <edge from-layer="364" from-port="2" to-layer="365" to-port="0"/> <edge from-layer="365" from-port="2" to-layer="368" to-port="0"/> <edge from-layer="365" from-port="2" to-layer="405" to-port="1"/> <edge from-layer="366" from-port="0" to-layer="367" to-port="0"/> <edge from-layer="367" from-port="1" to-layer="368" to-port="1"/> <edge from-layer="368" from-port="2" to-layer="371" to-port="0"/> <edge from-layer="369" from-port="0" to-layer="370" to-port="0"/> <edge from-layer="370" from-port="1" to-layer="371" to-port="1"/> <edge from-layer="371" from-port="2" to-layer="372" to-port="0"/> <edge from-layer="372" from-port="1" to-layer="375" to-port="0"/> <edge from-layer="373" from-port="0" to-layer="374" to-port="0"/> <edge from-layer="374" from-port="1" to-layer="375" to-port="1"/> <edge from-layer="375" from-port="2" to-layer="378" to-port="0"/> <edge from-layer="376" from-port="0" to-layer="377" to-port="0"/> <edge from-layer="377" from-port="1" to-layer="378" to-port="1"/> <edge from-layer="378" from-port="2" to-layer="379" to-port="0"/> <edge from-layer="379" from-port="1" to-layer="380" to-port="0"/> <edge from-layer="379" from-port="1" to-layer="398" to-port="0"/> <edge from-layer="380" from-port="1" to-layer="383" to-port="0"/> <edge from-layer="381" from-port="0" to-layer="382" to-port="0"/> <edge from-layer="382" from-port="1" to-layer="383" to-port="1"/> <edge from-layer="383" from-port="2" to-layer="386" to-port="0"/> <edge from-layer="384" from-port="0" to-layer="385" to-port="0"/> <edge from-layer="385" from-port="1" to-layer="386" to-port="1"/> <edge from-layer="386" from-port="2" to-layer="387" to-port="0"/> <edge from-layer="387" from-port="1" to-layer="390" to-port="0"/> <edge from-layer="388" from-port="0" to-layer="389" to-port="0"/> <edge from-layer="389" from-port="1" to-layer="390" to-port="1"/> <edge from-layer="390" from-port="2" to-layer="393" to-port="0"/> <edge from-layer="391" from-port="0" 
to-layer="392" to-port="0"/> <edge from-layer="392" from-port="1" to-layer="393" to-port="1"/> <edge from-layer="393" from-port="2" to-layer="394" to-port="0"/> <edge from-layer="394" from-port="1" to-layer="397" to-port="0"/> <edge from-layer="395" from-port="0" to-layer="396" to-port="0"/> <edge from-layer="396" from-port="1" to-layer="397" to-port="1"/> <edge from-layer="397" from-port="2" to-layer="398" to-port="1"/> <edge from-layer="398" from-port="2" to-layer="401" to-port="0"/> <edge from-layer="399" from-port="0" to-layer="400" to-port="0"/> <edge from-layer="400" from-port="1" to-layer="401" to-port="1"/> <edge from-layer="401" from-port="2" to-layer="404" to-port="0"/> <edge from-layer="402" from-port="0" to-layer="403" to-port="0"/> <edge from-layer="403" from-port="1" to-layer="404" to-port="1"/> <edge from-layer="404" from-port="2" to-layer="405" to-port="0"/> <edge from-layer="405" from-port="2" to-layer="408" to-port="0"/> <edge from-layer="406" from-port="0" to-layer="407" to-port="0"/> <edge from-layer="407" from-port="1" to-layer="408" to-port="1"/> <edge from-layer="408" from-port="2" to-layer="411" to-port="0"/> <edge from-layer="409" from-port="0" to-layer="410" to-port="0"/> <edge from-layer="410" from-port="1" to-layer="411" to-port="1"/> <edge from-layer="411" from-port="2" to-layer="412" to-port="0"/> <edge from-layer="412" from-port="1" to-layer="413" to-port="0"/> <edge from-layer="413" from-port="1" to-layer="416" to-port="0"/> <edge from-layer="414" from-port="0" to-layer="415" to-port="0"/> <edge from-layer="415" from-port="1" to-layer="416" to-port="1"/> <edge from-layer="416" from-port="2" to-layer="419" to-port="0"/> <edge from-layer="417" from-port="0" to-layer="418" to-port="0"/> <edge from-layer="418" from-port="1" to-layer="419" to-port="1"/> <edge from-layer="419" from-port="2" to-layer="420" to-port="0"/> <edge from-layer="420" from-port="1" to-layer="421" to-port="0"/> <edge from-layer="421" from-port="1" to-layer="424" to-port="0"/> <edge from-layer="422" from-port="0" to-layer="423" to-port="0"/> <edge from-layer="423" from-port="1" to-layer="424" to-port="1"/> <edge from-layer="424" from-port="2" to-layer="427" to-port="0"/> <edge from-layer="425" from-port="0" to-layer="426" to-port="0"/> <edge from-layer="426" from-port="1" to-layer="427" to-port="1"/> <edge from-layer="427" from-port="2" to-layer="429" to-port="0"/> <edge from-layer="428" from-port="0" to-layer="429" to-port="1"/> <edge from-layer="429" from-port="2" to-layer="431" to-port="0"/> <edge from-layer="430" from-port="0" to-layer="431" to-port="1"/> <edge from-layer="431" from-port="2" to-layer="432" to-port="0"/> <edge from-layer="432" from-port="1" to-layer="433" to-port="0"/> </edges> <meta_data> <MO_version value="2022.1.0-6148-76a74f3f2d3"/> <cli_parameters> <caffe_parser_path value="DIR"/> <compress_fp16 value="True"/> <data_type value="FP16"/> <disable_nhwc_to_nchw value="False"/> <disable_omitting_optional value="False"/> <disable_resnet_optimization value="False"/> <disable_weights_compression value="False"/> <enable_concat_optimization value="False"/> <enable_flattening_nested_params value="False"/> <enable_ssd_gluoncv value="False"/> <extensions value="DIR"/> <framework value="tf"/> <freeze_placeholder_with_value value="{}"/> <generate_deprecated_IR_V7 value="False"/> <input_model value="DIR\v3-small_224_1.0_float.pb"/> <input_model_is_text value="False"/> <input_shape value="[1,224,224,3]"/> <k value="DIR\CustomLayersMapping.xml"/> <keep_shape_ops value="True"/> <layout 
value="()"/> <layout_values value="{}"/> <legacy_ir_generation value="False"/> <legacy_mxnet_model value="False"/> <log_level value="ERROR"/> <mean_scale_values value="[(array([127.5, 127.5, 127.5]), array([127.5]))]"/> <mean_values value="[127.5,127.5,127.5]"/> <model_name value="v3-small_224_1.0_float"/> <output_dir value="DIR"/> <packed_user_shapes value="defaultdict(&lt;class 'list'&gt;, {'input': [{'shape': (1, 224, 224, 3), 'port': None, 'added': True}]})"/> <placeholder_data_types value="{}"/> <placeholder_shapes value="(1, 224, 224, 3)"/> <progress value="False"/> <remove_memory value="False"/> <remove_output_softmax value="False"/> <reverse_input_channels value="False"/> <save_params_from_nd value="False"/> <scale_values value="[127.5]"/> <silent value="False"/> <source_layout value="()"/> <static_shape value="False"/> <stream_output value="False"/> <target_layout value="()"/> <transform value=""/> <use_legacy_frontend value="False"/> <use_new_frontend value="False"/> <unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input, input_checkpoint, input_meta_graph, input_proto, input_symbol, mean_file, mean_file_offsets, move_to_preprocess, nd_prefix_name, output, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_use_custom_operations_config, transformations_config"/> </cli_parameters> </meta_data> </net>