in scripts/float16.py [0:0]
def check_if_fp16_ready(graph_proto):
    """Check whether an ONNX graph already contains float16 content.

    The graph is treated as "fp16 ready" (i.e. already converted) when any
    of the following holds:
      * a graph input, output, or intermediate value_info has elem_type
        FLOAT16,
      * an initializer tensor has data_type FLOAT16,
      * a Cast node casts to FLOAT16 (its ``to`` attribute equals FLOAT16).

    Args:
        graph_proto: an ONNX ``GraphProto`` to inspect.

    Returns:
        bool: True if the graph appears to already be converted to float16,
        False otherwise.
    """
    fp16 = onnx_proto.TensorProto.FLOAT16

    # Check graph inputs, outputs, and intermediate value_info entries.
    is_value_info_fp16 = any(
        value_info.type.tensor_type.elem_type == fp16
        for value_info in itertools.chain(
            graph_proto.output, graph_proto.input, graph_proto.value_info
        )
    )

    # Check initializers.
    is_initializer_fp16 = any(
        initializer.data_type == fp16 for initializer in graph_proto.initializer
    )

    # Check Cast nodes. Look up the 'to' attribute by name rather than
    # assuming it is attribute[0]: proto attribute order is not guaranteed,
    # newer opsets add a 'saturate' attribute to Cast, and an empty
    # attribute list would otherwise raise IndexError.
    has_cast_node_fp16 = any(
        node.op_type == "Cast"
        and any(attr.name == "to" and attr.i == fp16 for attr in node.attribute)
        for node in graph_proto.node
    )

    # Any of the above signals means the graph was already converted.
    return is_value_info_fp16 or is_initializer_fp16 or has_cast_node_fp16