def forward()

in python/singa/autograd.py [0:0]


    def forward(self, x):
        # Pad x along each axis according to the ONNX-style `pads`
        # attribute; supports the "constant", "reflect" and "edge" modes.
        if not self.pad_width:
            # Regroup pads = [x1_begin, x2_begin, ..., x1_end, x2_end, ...]
            # into per-axis (begin, end) pairs and cache them, e.g.
            # pads = [1, 2, 3, 4] -> pad_width = ((1, 3), (2, 4)).
            half_width = len(self.pads) // 2
            for i in range(half_width):
                self.pad_width += ((self.pads[i], self.pads[i + half_width]),)

        # Pad one axis at a time, handling the left (begin) and right (end)
        # sides in turn; a width of 0 leaves that side untouched.
        for axis, pads in zip(range(len(x.shape())), self.pad_width):
            for pad, is_left in zip(pads, (True, False)):
                if pad == 0:
                    continue
                pad_shape = list(x.shape())
                if self.mode == "constant":
                    # Constant mode: build a padding tensor of width `pad`
                    # filled with self.constant and concatenate it on `axis`.
                    pad_shape[axis] = pad
                    padding = singa.Tensor(list(pad_shape), x.device())
                    padding.SetFloatValue(self.constant)
                    if is_left:
                        x = singa.ConcatOn(singa.VecTensor([padding, x]), axis)
                    else:
                        x = singa.ConcatOn(singa.VecTensor([x, padding]), axis)
                elif self.mode == "reflect":
                    axis_shape = pad_shape[axis]
                    if is_left:
                        padding = singa.SliceOn(x, 0, pad, axis)
                        x = singa.ConcatOn(singa.VecTensor([padding, x]), axis)
                    else:
                        padding = singa.SliceOn(x, axis_shape - pad, axis_shape,
                                                axis)
                        x = singa.ConcatOn(singa.VecTensor([x, padding]), axis)
                elif self.mode == "edge":
                    axis_shape = pad_shape[axis]
                    if is_left:
                        padding = []
                        for _ in range(pad):
                            padding.append(singa.SliceOn(x, 0, 1, axis))
                        padding.append(x)
                        padding = singa.VecTensor(padding)
                        x = singa.ConcatOn(padding, axis)
                    else:
                        padding = [x]
                        for _ in range(pad):
                            padding.append(
                                singa.SliceOn(x, axis_shape - 1, axis_shape,
                                              axis))
                        padding = singa.VecTensor(padding)
                        x = singa.ConcatOn(padding, axis)
        return x
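
For orientation, the NumPy sketch below (an illustration only, not SINGA code; `pad_like_forward` is a hypothetical helper) mirrors the same bookkeeping: ONNX-style pads [x1_begin, x2_begin, ..., x1_end, x2_end, ...] are regrouped into per-axis (begin, end) pairs before padding each axis. Note that np.pad's "reflect" mirrors without repeating the border element, whereas the slice-and-concat above copies the border slice directly, so only the "constant" and "edge" cases correspond exactly.

    import numpy as np

    def pad_like_forward(x, pads, mode="constant", constant=0.0):
        # Regroup ONNX-style pads into per-axis (begin, end) pairs,
        # mirroring the self.pad_width bookkeeping above.
        half = len(pads) // 2
        pad_width = [(pads[i], pads[i + half]) for i in range(half)]
        if mode == "constant":
            return np.pad(x, pad_width, mode="constant",
                          constant_values=constant)
        return np.pad(x, pad_width, mode=mode)  # e.g. "edge"

    x = np.arange(6, dtype=np.float32).reshape(2, 3)
    y = pad_like_forward(x, pads=[0, 1, 0, 1], mode="edge")
    print(y.shape)  # (2, 5): one column repeated on each side of axis 1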