def make_layers_custom()

in models/cifar/allconv_cnsn.py


def make_layers_custom(cfg, norm_func, pos, beta, crop, cnsn_type):
  """Build the AllConv feature layers from cfg, inserting a CNSN module into each conv block."""
  layers = []
  in_channels = 3
  pos = int(pos)
  # pos selects where the CNSN module is inserted within [conv, norm, activation].
  print('pos in [conv, norm, relu]: {}'.format(pos))
  assert pos in [1, 2, 3]
  assert cnsn_type in ['sn', 'cn', 'cnsn']

  for v in cfg:
    if v == 'Md':
      # Downsample with 2x2 max pooling, then apply dropout.
      layers += [nn.MaxPool2d(kernel_size=2, stride=2), nn.Dropout(p=0.5)]
    elif v == 'A':
      # Spatial average pooling with an 8x8 window.
      layers += [nn.AvgPool2d(kernel_size=8)]
    elif v == 'NIN':
      # 1x1 network-in-network conv; keeps the channel count (padding=1 grows the spatial dims by 2).
      conv2d = nn.Conv2d(in_channels, in_channels, kernel_size=1, padding=1)
      tmp_layers = [conv2d, norm_func(in_channels), GELU()]

      if 'cn' in cnsn_type:
        print('using CrossNorm with crop: {}'.format(crop))
        crossnorm = CrossNorm(crop=crop, beta=beta)
      else:
        crossnorm = None

      if 'sn' in cnsn_type:
        print('using SelfNorm')
        selfnorm = SelfNorm(in_channels)
      else:
        selfnorm = None

      cnsn = CNSN(crossnorm=crossnorm, selfnorm=selfnorm)

      # Insert the combined CrossNorm/SelfNorm module at the requested position
      # within [conv, norm, activation].
      tmp_layers.insert(pos, cnsn)

      layers += tmp_layers
    elif v == 'nopad':
      # 3x3 conv without padding, shrinking the spatial dims by 2.
      conv2d = nn.Conv2d(in_channels, in_channels, kernel_size=3, padding=0)
      tmp_layers = [conv2d, norm_func(in_channels), GELU()]

      if 'cn' in cnsn_type:
        print('using CrossNorm with crop: {}'.format(crop))
        crossnorm = CrossNorm(crop=crop, beta=beta)
      else:
        crossnorm = None

      if 'sn' in cnsn_type:
        print('using SelfNorm')
        selfnorm = SelfNorm(in_channels)
      else:
        selfnorm = None

      cnsn = CNSN(crossnorm=crossnorm, selfnorm=selfnorm)

      tmp_layers.insert(pos, cnsn)

      layers += tmp_layers
    else:
      # 3x3 padded conv that changes the channel count to v.
      conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
      tmp_layers = [conv2d, norm_func(v), GELU()]

      if 'cn' in cnsn_type:
        print('using CrossNorm with crop: {}'.format(crop))
        crossnorm = CrossNorm(crop=crop, beta=beta)
      else:
        crossnorm = None

      if 'sn' in cnsn_type:
        print('using SelfNorm')
        selfnorm = SelfNorm(v)
      else:
        selfnorm = None

      cnsn = CNSN(crossnorm=crossnorm, selfnorm=selfnorm)

      tmp_layers.insert(pos, cnsn)

      layers += tmp_layers
      in_channels = v

  return nn.Sequential(*layers)
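
A minimal usage sketch, assuming the standard AllConvNet cfg layout and default CrossNorm settings; the cfg, norm_func, pos, beta, and crop values below are illustrative assumptions, not the repository's actual training configuration:

import torch
import torch.nn as nn
from models.cifar.allconv_cnsn import make_layers_custom

# Assumed AllConvNet-style layout: three 96-channel convs, pool, three 192-channel
# convs, pool, then the unpadded conv, two NIN blocks, and average pooling.
cfg = [96, 96, 96, 'Md', 192, 192, 192, 'Md', 'nopad', 'NIN', 'NIN', 'A']

features = make_layers_custom(
    cfg,
    norm_func=nn.BatchNorm2d,  # assumed normalization constructor
    pos=2,                     # insert CNSN between norm and GELU
    beta=None,                 # placeholder; only used by CrossNorm's crop sampling
    crop=None,                 # placeholder; see CrossNorm for supported modes
    cnsn_type='cnsn')          # use both CrossNorm and SelfNorm

out = features(torch.randn(2, 3, 32, 32))  # CIFAR-sized input
print(out.shape)  # expected (2, 192, 1, 1) with the cfg above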