def extensible_resnet_column_progNN()

in model/model.py [0:0]


    def extensible_resnet_column_progNN(self, x, kernels, filters, strides, task):
        """
        Define the subsequent columns of the progressive NN - ResNet-18
        """
        self.trainable_vars.append([])
        self.h_pnn.append([])
        self.h_pnn[task].append(x)

        # Conv1
        h = _conv(x, kernels[0], filters[0], strides[0], self.trainable_vars[task], name='conv_1_t%d'%(task))
        h = _bn(h, self.trainable_vars[task], self.train_phase[task], name='bn_1_t%d'%(task))
        # Add lateral connections
        for tt in range(task):
            U_w = weight_variable([1, 1, self.h_pnn[tt][0].get_shape().as_list()[-1], h.get_shape().as_list()[-1]], name='conv_1_w_t%d_tt%d'%(task, tt))
            U_b = bias_variable([h.get_shape().as_list()[-1]], name='conv_1_b_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            h += create_conv_layer(self.h_pnn[tt][0], U_w, U_b, apply_relu=False)
        h = tf.nn.relu(h)
        self.h_pnn[task].append(h)

        # Conv2_x
        h = _residual_block(h, self.trainable_vars[task], self.train_phase[task], name='conv2_1_t%d'%(task))
        h = _residual_block(h, self.trainable_vars[task], self.train_phase[task], apply_relu=False, name='conv2_2_t%d'%(task))
        # Add lateral connections
        for tt in range(task):
            U_w = weight_variable([1, 1, self.h_pnn[tt][1].get_shape().as_list()[-1], h.get_shape().as_list()[-1]], name='conv_2_w_t%d_tt%d'%(task, tt))
            U_b = bias_variable([h.get_shape().as_list()[-1]], name='conv_2_b_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            h += create_conv_layer(self.h_pnn[tt][1], U_w, U_b, apply_relu=False)
        h = tf.nn.relu(h)
        self.h_pnn[task].append(h)

        # Conv3_x
        h = _residual_block_first(h, filters[2], strides[2], self.trainable_vars[task], self.train_phase[task], name='conv3_1_t%d'%(task), is_ATT_DATASET=self.is_ATT_DATASET)
        h = _residual_block(h, self.trainable_vars[task], self.train_phase[task], apply_relu=False, name='conv3_2_t%d'%(task))
        # Add lateral connections
        for tt in range(task):
            U_w = weight_variable([1, 1, self.h_pnn[tt][2].get_shape().as_list()[-1], h.get_shape().as_list()[-1]], name='conv_3_w_t%d_tt%d'%(task, tt))
            U_b = bias_variable([h.get_shape().as_list()[-1]], name='conv_3_b_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            h += create_conv_layer(self.h_pnn[tt][2], U_w, U_b, stride=strides[2], apply_relu=False)
        h = tf.nn.relu(h)
        self.h_pnn[task].append(h)

        # Conv4_x
        h = _residual_block_first(h, filters[3], strides[3], self.trainable_vars[task], self.train_phase[task], name='conv4_1_t%d'%(task), is_ATT_DATASET=self.is_ATT_DATASET)
        h = _residual_block(h, self.trainable_vars[task], self.train_phase[task], apply_relu=False, name='conv4_2_t%d'%(task))
        # Add lateral connections
        for tt in range(task):
            U_w = weight_variable([1, 1, self.h_pnn[tt][3].get_shape().as_list()[-1], h.get_shape().as_list()[-1]], name='conv_4_w_t%d_tt%d'%(task, tt))
            U_b = bias_variable([h.get_shape().as_list()[-1]], name='conv_4_b_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            h += create_conv_layer(self.h_pnn[tt][3], U_w, U_b, stride=strides[3], apply_relu=False)
        h = tf.nn.relu(h)
        self.h_pnn[task].append(h)

        # Conv5_x
        h = _residual_block_first(h, filters[4], strides[4], self.trainable_vars[task], self.train_phase[task], name='conv5_1_t%d'%(task), is_ATT_DATASET=self.is_ATT_DATASET)
        h = _residual_block(h, self.trainable_vars[task], self.train_phase[task], apply_relu=False, name='conv5_2_t%d'%(task))
        # Add lateral connections
        for tt in range(task):
            U_w = weight_variable([1, 1, self.h_pnn[tt][4].get_shape().as_list()[-1], h.get_shape().as_list()[-1]], name='conv_5_w_t%d_tt%d'%(task, tt))
            U_b = bias_variable([h.get_shape().as_list()[-1]], name='conv_5_b_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            h += create_conv_layer(self.h_pnn[tt][4], U_w, U_b, stride=strides[4], apply_relu=False)
        h = tf.nn.relu(h)
        self.h_pnn[task].append(h)

        # Global average pooling over the spatial dimensions
        h = tf.reduce_mean(h, [1, 2])

        if self.network_arch == 'RESNET-S':
            logits = _fc(h, self.total_classes, self.trainable_vars[task], name='fc_1_t%d'%(task), is_cifar=True)
        else:
            logits = _fc(h, self.total_classes, self.trainable_vars[task], name='fc_1_t%d'%(task))
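        # Add lateral connections into the classifier: fully-connected adapters
        # on the previous columns' pooled conv5_x features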
        for tt in range(task):
            h_tt = tf.reduce_mean(self.h_pnn[tt][5], [1, 2])
            U_w = weight_variable([h_tt.get_shape().as_list()[1], self.total_classes], name='fc_uw_1_t%d_tt%d'%(task, tt))
            U_b = bias_variable([self.total_classes], name='fc_ub_1_t%d_tt%d'%(task, tt))
            self.trainable_vars[task].append(U_w)
            self.trainable_vars[task].append(U_b)
            logits += create_fc_layer(h_tt, U_w, U_b, apply_relu=False)
        self.h_pnn[task].append(logits)

        return logits
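
The same lateral-connection pattern recurs at every stage above: a 1x1 convolution (U_w, U_b) projects a frozen previous-column activation to the current column's channel width so it can be summed into the pre-activation. A minimal self-contained sketch of that adapter, written with raw TF1 ops rather than the repo's weight_variable / bias_variable / create_conv_layer helpers (the helper-free form and the initializers are assumptions, not the repo's exact implementation):

    import tensorflow as tf

    def lateral_adapter(prev_activation, out_channels, stride=1, name='lateral'):
        """1x1 conv adapter for a progressive-NN lateral connection (illustrative sketch)."""
        in_channels = prev_activation.get_shape().as_list()[-1]
        with tf.variable_scope(name):
            # Projection weights: map the previous column's channel width to the
            # current column's channel width.
            U_w = tf.get_variable('w', [1, 1, in_channels, out_channels],
                                  initializer=tf.truncated_normal_initializer(stddev=0.1))
            U_b = tf.get_variable('b', [out_channels],
                                  initializer=tf.constant_initializer(0.0))
        # The stride matches the downsampling of the stage being fed (e.g. strides[2]
        # for conv3_x), so spatial dimensions line up before the summation.
        return tf.nn.conv2d(prev_activation, U_w, strides=[1, stride, stride, 1],
                            padding='SAME') + U_b

Under those assumptions, each loop body above amounts to h += lateral_adapter(self.h_pnn[tt][stage], h_channels, stride), with the stage's stride passed for the downsampling stages (conv3_x through conv5_x); the adapter variables are also appended to self.trainable_vars[task], so only the new column, including its adapters, is trained while earlier columns stay frozen.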