luckmatter/model_gen.py [268:317]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def num_layers(self):
        return len(self.ws_linear) + 1

    def from_bottom_bn(self, j):
        assert j < len(self.ws_bn)
        return self.ws_bn[j]

def prune(net, ratios):
    # Prune the network (the returned masks are then used for finetuning).
    n = net.num_layers()
    # Score each input unit by the mean absolute value of its weights and prune the lowest-scoring ones, layer by layer.
    masks = []
    inactive_nodes = []
    for i in range(1, n):
        W = net.from_bottom_linear(i)
        # Prune a ratios[i - 1] fraction of this layer's input neurons.
        input_dim = W.size(1)
        fc_to_conv = False

        if isinstance(net, ModelConv):
            if len(W.size()) == 4:
                # W: [output_filter, input_filter, x, y]
                w_norms = W.permute(1, 0, 2, 3).contiguous().view(W.size(1), -1).abs().mean(1)
            else:
                # An FC layer following the conv stack; group its inputs by the previous layer's output filters.
                input_dim = net.from_bottom_linear(i - 1).size(0)
                W_reshaped = W.view(W.size(0), -1, input_dim)
                w_norms = W_reshaped.view(-1, input_dim).abs().mean(0)
                fc_to_conv = True
        else:
            # W: [output_dim, input_dim]
            w_norms = W.abs().mean(0)

        sorted_w, sorted_indices = w_norms.sort(0)
        n_pruned = int(input_dim * ratios[i - 1])
        inactive_mask = sorted_indices[:n_pruned]

        # Build a 0/1 mask of the same shape as W, zeroing the pruned input units.
        m = W.clone().fill_(1.0)
        if fc_to_conv:
            m = m.view(m.size(0), -1, input_dim)
            m[:, :, inactive_mask] = 0
            m = m.view(W.size(0), W.size(1))
        else:
            m[:, inactive_mask] = 0

        # These inputs are output nodes of the layer below; record them and the weight mask.
        inactive_nodes.append(inactive_mask.cpu().tolist())
        masks.append(m)
        
    return inactive_nodes, masks
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
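
A minimal usage sketch for the masks returned by prune(). The ratio value, the
mask-application loop, and the assumption that from_bottom_linear(i) hands back
each layer's weight tensor directly are illustrative, not the repo's own
finetuning code:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Hypothetical caller: prune half of the input units of every layer above the
# first, then keep the pruned connections at zero by re-applying the masks.
ratios = [0.5] * (net.num_layers() - 1)
inactive_nodes, masks = prune(net, ratios)

for i, m in enumerate(masks):
    # masks[i] was built for layer i + 1 and has the same shape as its weight.
    W = net.from_bottom_linear(i + 1)
    W.data *= m
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -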



student_specialization/model_gen.py [305:354]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def num_layers(self):
        return len(self.ws_linear) + 1

    def from_bottom_bn(self, j):
        assert j < len(self.ws_bn)
        return self.ws_bn[j]

def prune(net, ratios):
    # Prune the network (the returned masks are then used for finetuning).
    n = net.num_layers()
    # Score each input unit by the mean absolute value of its weights and prune the lowest-scoring ones, layer by layer.
    masks = []
    inactive_nodes = []
    for i in range(1, n):
        W = net.from_bottom_linear(i)
        # Prune a ratios[i - 1] fraction of this layer's input neurons.
        input_dim = W.size(1)
        fc_to_conv = False

        if isinstance(net, ModelConv):
            if len(W.size()) == 4:
                # W: [output_filter, input_filter, x, y]
                w_norms = W.permute(1, 0, 2, 3).contiguous().view(W.size(1), -1).abs().mean(1)
            else:
                # An FC layer following the conv stack; group its inputs by the previous layer's output filters.
                input_dim = net.from_bottom_linear(i - 1).size(0)
                W_reshaped = W.view(W.size(0), -1, input_dim)
                w_norms = W_reshaped.view(-1, input_dim).abs().mean(0)
                fc_to_conv = True
        else:
            # W: [output_dim, input_dim]
            w_norms = W.abs().mean(0)

        sorted_w, sorted_indices = w_norms.sort(0)
        n_pruned = int(input_dim * ratios[i - 1])
        inactive_mask = sorted_indices[:n_pruned]

        # Build a 0/1 mask of the same shape as W, zeroing the pruned input units.
        m = W.clone().fill_(1.0)
        if fc_to_conv:
            m = m.view(m.size(0), -1, input_dim)
            m[:, :, inactive_mask] = 0
            m = m.view(W.size(0), W.size(1))
        else:
            m[:, inactive_mask] = 0

        # These inputs are output nodes of the layer below; record them and the weight mask.
        inactive_nodes.append(inactive_mask.cpu().tolist())
        masks.append(m)
        
    return inactive_nodes, masks
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
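
A self-contained sketch of the scoring step for a 4-D conv weight, with made-up
shapes (8 output filters, 4 input channels, 3x3 kernels), showing that the
permute/view reduction yields one mean-absolute-weight score per input channel
and that the lowest-scoring channels are the ones marked inactive:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import torch

# W: [output_filter, input_filter, x, y], as in the conv branch of prune().
W = torch.randn(8, 4, 3, 3)

# One score per input channel: mean |w| over all output filters and kernel positions.
w_norms = W.permute(1, 0, 2, 3).contiguous().view(W.size(1), -1).abs().mean(1)
assert w_norms.shape == (4,)

# With ratio 0.5, the two input channels with the smallest scores are pruned.
sorted_w, sorted_indices = w_norms.sort(0)
n_pruned = int(W.size(1) * 0.5)
inactive_mask = sorted_indices[:n_pruned]
print("inactive input channels:", inactive_mask.tolist())
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -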



