def apply()

in onnxconverter_common/optimizer.py [0:0]

Fuses a trailing BatchNormalization node into the preceding convolution: the BN scale, bias, running mean and variance are folded into new weight and bias initializers for the Conv node, the BN node's consumers are rewired to read the Conv output directly, and the BN node is removed from the node list.

    def apply(self, node_list):
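        # Skip the fusion if the BatchNormalization node is marked as reserved.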
        if self.end_p.is_reserved:
            return None, False
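        # Read the original Conv weight (input 1) and, if present, the Conv bias (input 2).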
        conv_ori_weight = numpy_helper.to_array(self.begin_n.get_precedence_by_idx(1).tensors[0])
        conv_ori_bias = 0
        if len(self.begin_n.precedence) > 2:
            conv_ori_bias = numpy_helper.to_array(self.begin_n.get_precedence_by_idx(2).tensors[0])
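        # Read the BatchNormalization parameters: scale, bias (B), running mean and variance.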
        scale = numpy_helper.to_array(self.end_p.get_precedence_by_idx(1).tensors[0])
        B = numpy_helper.to_array(self.end_p.get_precedence_by_idx(2).tensors[0])
        mean = numpy_helper.to_array(self.end_p.get_precedence_by_idx(3).tensors[0])
        var = numpy_helper.to_array(self.end_p.get_precedence_by_idx(4).tensors[0])
        epsilon = self.end_p.get_attribute('epsilon', 1.0e-5)
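        # Fold BN into the weight: W' = W * scale / sqrt(var + epsilon), broadcast along the
        # output-channel axis for 2-D, 3-D and 4-D weight tensors.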
        adjusted_scale = scale / np.sqrt(var + epsilon)
        if len(conv_ori_weight.shape) == 4:
            conv_weight = conv_ori_weight * adjusted_scale[:, None, None, None]
        elif len(conv_ori_weight.shape) == 3:
            conv_weight = conv_ori_weight * adjusted_scale[:, None, None]
        elif len(conv_ori_weight.shape) == 2:
            conv_weight = conv_ori_weight * adjusted_scale[:, None]
        else:
            return None, False
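        # Fold BN into the bias: b' = (b - mean) * scale / sqrt(var + epsilon) + B.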
        conv_bias = (conv_ori_bias - mean) * adjusted_scale + B

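        # Create new initializers holding the folded weight and bias.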
        conv_weight_name = self.begin_n.origin.name + '_W_new'
        conv_weight_initilizer = numpy_helper.from_array(conv_weight, name=conv_weight_name)
        conv_bias_name = self.begin_n.origin.name + '_B_new'
        conv_bias_initilizer = numpy_helper.from_array(conv_bias, name=conv_bias_name)

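        # Point the Conv node at the new initializers; add a bias input if the Conv originally had none.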
        self.begin_n.in_redirect(self.begin_n.origin.input[1], conv_weight_name)
        if len(self.begin_n.input) > 2:
            self.begin_n.in_redirect(self.begin_n.origin.input[2], conv_bias_name)
        else:
            self.begin_n.input[conv_bias_name] = conv_bias_name
        self.begin_n.initializers = [conv_weight_initilizer, conv_bias_initilizer]

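        # Rewire every consumer of the BatchNormalization output to consume the Conv output instead.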
        self.begin_n.successor = []
        for end_ in self.end:
            end_.in_redirect(self.end_p.origin.output[0], self.begin_n.origin.output[0])
            self.begin_n.successor.append(end_)
            end_.precedence[end_.precedence.index(self.end_p)] = self.begin_n

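        # The BatchNormalization node is now redundant; remove it from the node list.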
        node_list.remove(self.end_p)

        return node_list, True
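
For reference, the folding identity this function relies on can be checked in isolation. The sketch below is a hypothetical, self-contained NumPy example (it does not use the optimizer's node classes; all names in it are made up for illustration): BatchNormalization applied after a 1x1 Conv produces the same output as a single Conv whose weight and bias were folded exactly as in apply() above.

import numpy as np

# Hypothetical sizes: 3 output channels, 2 input channels, 1x1 kernel.
rng = np.random.default_rng(0)
W = rng.standard_normal((3, 2, 1, 1))      # Conv weight
b = rng.standard_normal(3)                 # Conv bias
scale = rng.standard_normal(3)             # BN scale
B = rng.standard_normal(3)                 # BN bias
mean = rng.standard_normal(3)              # BN running mean
var = rng.random(3) + 0.1                  # BN running variance (kept positive)
epsilon = 1.0e-5
x = rng.standard_normal(2)                 # one input "pixel" with 2 channels

# Conv (1x1) followed by BatchNormalization, computed per output channel.
conv_out = W[:, :, 0, 0] @ x + b
bn_out = scale * (conv_out - mean) / np.sqrt(var + epsilon) + B

# The same folding as in apply() above.
adjusted_scale = scale / np.sqrt(var + epsilon)
W_new = W * adjusted_scale[:, None, None, None]
b_new = (b - mean) * adjusted_scale + B
fused_out = W_new[:, :, 0, 0] @ x + b_new

assert np.allclose(bn_out, fused_out)

This is the standard Conv/BatchNorm folding identity; the optimizer simply applies it to the ONNX initializer tensors and rewires the graph around the removed BN node.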