in mmdnn/conversion/coreml/coreml_parser.py [0:0]
def gen_IR(self):
    for i, layer in enumerate(self.coreml_graph.topological_sort):
        current_node = self.coreml_graph.get_node(layer)
        current_node_layer = current_node.layer

        # Layer name, used only for the unsupported-operator message below.
        node_type = current_node_layer.name

        # Model inputs are FeatureDescription protos; actual layers are
        # NeuralNetworkLayer protos (both from coremltools.proto, imported
        # at module level).
        if isinstance(current_node_layer, Model_pb2.FeatureDescription):
            self.rename_InputLayer(current_node)
        elif isinstance(current_node_layer, NeuralNetwork_pb2.NeuralNetworkLayer):
            # NeuralNetworkLayer wraps a oneof over the concrete layer types;
            # HasField tells us which concrete layer this node carries.
            if current_node_layer.HasField("convolution"):
                self.rename_CONV2D(current_node)
            elif current_node_layer.HasField("batchnorm"):
                self.rename_BatchNormalization(current_node)
            elif current_node_layer.HasField("scale"):
                self.rename_scale(current_node)
            elif current_node_layer.HasField("pooling"):
                self.rename_Pooling(current_node)
            elif current_node_layer.HasField("activation"):
                self.rename_Activation(current_node)
            elif current_node_layer.HasField("softmax"):
                self.rename_Softmax(current_node)
            elif current_node_layer.HasField("padding"):
                self.rename_Padding(current_node)
            elif current_node_layer.HasField("add"):
                self.rename_Add(current_node)
            elif current_node_layer.HasField("flatten"):
                self.rename_Flatten(current_node)
            elif current_node_layer.HasField("innerProduct"):
                self.rename_innerProduct(current_node)
            elif current_node_layer.HasField("concat"):
                self.rename_Concatenate(current_node)
            else:
                print("CoremlParser has not supported operator [{}]".format(node_type))
                self.rename_UNKNOWN(current_node)
        else:
            assert False, "Unexpected node layer type: {}".format(type(current_node_layer))
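The long if/elif chain over HasField checks could equivalently be driven by a small dispatch table. The sketch below is illustrative only: the table and the dispatch_layer helper are not part of MMdnn's CoremlParser; they simply reuse the handler names from the method above under that assumption.

# Hypothetical refactor sketch, not MMdnn code: maps each NeuralNetworkLayer
# oneof field name to the rename_* handler used in gen_IR above.
LAYER_FIELD_TO_HANDLER = {
    "convolution":  "rename_CONV2D",
    "batchnorm":    "rename_BatchNormalization",
    "scale":        "rename_scale",
    "pooling":      "rename_Pooling",
    "activation":   "rename_Activation",
    "softmax":      "rename_Softmax",
    "padding":      "rename_Padding",
    "add":          "rename_Add",
    "flatten":      "rename_Flatten",
    "innerProduct": "rename_innerProduct",
    "concat":       "rename_Concatenate",
}

def dispatch_layer(parser, current_node):
    """Route a NeuralNetworkLayer node to its rename_* handler, or fall back to UNKNOWN."""
    layer = current_node.layer
    for field, handler_name in LAYER_FIELD_TO_HANDLER.items():
        if layer.HasField(field):
            getattr(parser, handler_name)(current_node)
            return
    print("CoremlParser has not supported operator [{}]".format(layer.name))
    parser.rename_UNKNOWN(current_node)

A table-driven dispatch keeps the supported-operator list in one place, so adding a new CoreML layer type means one table entry plus one handler method rather than another elif branch.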