def whitelist_partition()

in python/graph_util.py [0:0]
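
The listing below assumes module-level imports along the following lines. The
TensorFlow paths are standard; the import paths for the internal helpers `ncc`
(neuron-cc discovery) and `gdu` (graph-def utilities) are inferred from usage
and may differ in the actual repository:

import subprocess
from tensorflow.core.protobuf import config_pb2, meta_graph_pb2
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.platform import tf_logging as logging  # or the standard logging module
from tensorflow.python.util import compat
from tensorflow.neuron.python import graph_def_util as gdu  # assumed path
from tensorflow.neuron.python import neuron_cc as ncc        # assumed path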


def whitelist_partition(graph_def, signature_def,
                        supported_op_types=None, no_fuse_ops=None, force_fuse_ops=None,
                        minimum_segment_size=None):
    """Partitions a `GraphDef` proto according to a TensorFlow op whitelist and
    fuses each whitelisted subgraph into an `NeuronOp`.

    Args:
        graph_def: input `GraphDef` proto.
        signature_def: a `SignatureDef` protobuf message marking graph inputs and outputs.
        supported_op_types: None or iterable of strings (unordered) representing
            whitelisted op type names.
        no_fuse_ops: None or iterable of strings (unordered) representing
            names of ops that will stay unfused.
        force_fuse_ops: None or iterable of strings (unordered) representing
            names of ops that will be forcibly fused into `NeuronOp`.
        minimum_segment_size: int; minimum number of ops in a `NeuronOp`.

    Returns:
        A `GraphDef` proto with whitelisted subgraphs fused as `NeuronOp`s.
    """
    original_graph_def = graph_def
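    # If no whitelist is given, query the neuron-cc compiler for the op types it supports.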
    if supported_op_types is None:
        neuron_cc = ncc.find_neuron_cc()
        if neuron_cc is None:
            return graph_def
        else:
            command = [neuron_cc, 'list-operators', '--framework', 'TENSORFLOW']
            try:
                output = subprocess.check_output(command).decode()
                supported_op_types = {op_type.strip() for op_type in output[:-1].split('\n')}
            except subprocess.CalledProcessError:
                logging.warning('neuron-cc is not behaving correctly. Please check the neuron-cc '
                                'installation, or reinstall it with "pip install --force-reinstall neuron-cc".')
                return graph_def
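            # Never fuse graph-input ops (Placeholder, IdentityN); treat SquaredDifference as supported.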
            supported_op_types.discard('Placeholder')
            supported_op_types.discard('IdentityN')
            supported_op_types.add('SquaredDifference')
    if no_fuse_ops is None:
        no_fuse_ops = []
    if force_fuse_ops is None:
        force_fuse_ops = []
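    # Default: require at least two non-Placeholder ops per fused segment (one if the graph has fewer).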
    if minimum_segment_size is None:
        num_ops = len([node for node in graph_def.node if node.op != 'Placeholder'])
        minimum_segment_size = min(2, max(1, num_ops))
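    # Build a grappler config that runs static shape inference before the fusion pass configured below.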
    opt_config = config_pb2.ConfigProto()
    rewriter_config = opt_config.graph_options.rewrite_options
    rewriter_config.meta_optimizer_iterations = 1
    rewriter_config.min_graph_nodes = 2
    rewriter_config.optimizers.append('aws_neuron_static_shape_inference')

    # configure operator fusion
    fuser_config = rewriter_config.custom_optimizers.add()
    fuser_config.name = 'aws_neuron_fuse_supported_operators'
    param_map = fuser_config.parameter_map
    param_map['minimum_segment_size'].i = minimum_segment_size
    param_map['supported_op_types'].list.s.extend(compat.as_bytes(item) for item in supported_op_types)
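    # Entries of no_fuse_ops/force_fuse_ops may be node objects or plain names; getattr extracts the name either way.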
    param_map['no_fuse_ops'].list.s.extend(compat.as_bytes(getattr(item, 'name', item)) for item in no_fuse_ops)
    param_map['force_fuse_ops'].list.s.extend(compat.as_bytes(getattr(item, 'name', item)) for item in force_fuse_ops)

    # create meta_graph_def and run grappler passes
    meta_graph_def = meta_graph_pb2.MetaGraphDef(graph_def=graph_def)
    meta_graph_def.signature_def['serving_default'].CopyFrom(signature_def)
    graph_def = tf_optimizer.OptimizeGraph(opt_config, meta_graph_def)

    # add subgraph's control input to `NeuronOp`'s control input
    original_node_with_control_inputs = gdu.get_node_with_control_inputs(original_graph_def)
    post_part_node_names = {node.name for node in graph_def.node}
    for node in gdu.get_neuron_nodes(graph_def):
        for sg_node in gdu.get_subgraph_def(node).node:
            if sg_node.name in original_node_with_control_inputs:
                for inp in original_node_with_control_inputs[sg_node.name]:
                    if inp.lstrip('^') in post_part_node_names and inp not in node.input:
                        node.input.append(inp)
    return graph_def
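
A minimal usage sketch. It assumes a frozen `GraphDef` and a matching
`SignatureDef` are already in hand; the variable and op names below are
hypothetical:

# Fuse every supported subgraph into NeuronOp nodes, keeping one named op unfused.
partitioned_graph_def = whitelist_partition(
    frozen_graph_def,                       # tf.compat.v1.GraphDef of the frozen model
    signature_def,                          # SignatureDef naming the graph inputs/outputs
    no_fuse_ops=['my_model/postprocess'],   # hypothetical op to keep out of any NeuronOp
    minimum_segment_size=2,
)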