luckmatter/model_gen.py [85:103]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        super(Model, self).__init__()
        # Record constructor configuration for later use (e.g. in forward()).
        self.d = d          # input dimensionality
        self.ks = ks        # base hidden-layer widths (before `multi` scaling)
        self.has_bn = has_bn
        # Hidden linear layers and their (optional) batch-norm layers.
        # NOTE: ws_bn is left empty when has_bn is False, so the two lists
        # are only index-aligned when batch norm is enabled.
        self.ws_linear = nn.ModuleList()
        self.ws_bn = nn.ModuleList()
        # Whether BN is applied before or after the nonlinearity —
        # presumably consumed by forward(); not visible here, confirm there.
        self.bn_before_relu = bn_before_relu
        last_k = d            # width of the previous layer; starts at the input dim
        self.sizes = [d]      # per-layer widths, including the input dim

        for k in ks:
            # Scale each requested width by `multi` (width multiplier).
            k *= multi
            self.ws_linear.append(nn.Linear(last_k, k, bias=has_bias))
            if has_bn:
                # affine=has_bn_affine: BN may run without learnable scale/shift.
                self.ws_bn.append(nn.BatchNorm1d(k, affine=has_bn_affine))
            self.sizes.append(k)
            last_k = k

        # Output projection from the last hidden width to d_output.
        self.final_w = nn.Linear(last_k, d_output, bias=has_bias)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



student_specialization/model_gen.py [78:96]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        super(Model, self).__init__()
        # Record constructor configuration for later use (e.g. in forward()).
        self.d = d          # input dimensionality
        self.ks = ks        # base hidden-layer widths (before `multi` scaling)
        self.has_bn = has_bn
        # Hidden linear layers and their (optional) batch-norm layers.
        # NOTE: ws_bn is left empty when has_bn is False, so the two lists
        # are only index-aligned when batch norm is enabled.
        self.ws_linear = nn.ModuleList()
        self.ws_bn = nn.ModuleList()
        # Whether BN is applied before or after the nonlinearity —
        # presumably consumed by forward(); not visible here, confirm there.
        self.bn_before_relu = bn_before_relu
        last_k = d            # width of the previous layer; starts at the input dim
        self.sizes = [d]      # per-layer widths, including the input dim

        for k in ks:
            # Scale each requested width by `multi` (width multiplier).
            k *= multi
            self.ws_linear.append(nn.Linear(last_k, k, bias=has_bias))
            if has_bn:
                # affine=has_bn_affine: BN may run without learnable scale/shift.
                self.ws_bn.append(nn.BatchNorm1d(k, affine=has_bn_affine))
            self.sizes.append(k)
            last_k = k

        # Output projection from the last hidden width to d_output.
        self.final_w = nn.Linear(last_k, d_output, bias=has_bias)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



