def _do_forward()

in python/singa/autograd.py


    def _do_forward(self, *xs):
        """
        Do not call this function from user code. It is called by __call__().
        Args:
            xs, Tensor instance(s)
        Returns:
            Tensor instance(s)
        """
        # TODO add the pre hook
        assert all(isinstance(x, Tensor) for x in xs), \
            "xs should include only Tensor instances"

        # need to do backward if any of its input arg needs gradient
        self.requires_grad = any(x.requires_grad for x in xs)

        self.src = []
        for x in xs:
            if x.stores_grad:
                # store the tensor whose gradient needs to be returned in
                # backward(), e.g. if x is a parameter
                self.src.append((x.creator, id(x), x, x.stores_grad))
            else:
                # intermediate tensors will be released soon;
                # no need to store them --> use None
                self.src.append((x.creator, id(x), None, x.stores_grad))
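        # each src entry is a 4-tuple (creator op, input tensor id,
        # tensor-or-None, stores_grad); backward() iterates over these
        # edges to route gradients back through the graph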

        # unwrap each input Tensor into its underlying CTensor (x.data)
        xs = tuple(x.data for x in xs)
        ys = self.forward(*xs)
        if not isinstance(ys, tuple):
            ys = (ys,)
        # create Tensor based on CTensor(data);
        # assume outputs are all Tensor instances
        ys = tuple(
            Tensor(
                device=y.device(),
                data=y,
                requires_grad=self.requires_grad,
                creator=self,
                name=self.output_name(idx),
            ) for idx, y in enumerate(ys))
        # map from python id to output index
        self.y_id2idx = {id(y): i for i, y in enumerate(ys)}
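        # backward() uses this map to recover an output's index from its
        # python id, so each incoming gradient lands in the right slot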
        # TODO add the post hook
        return ys
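
For context, here is a minimal usage sketch (not part of the file above): Operation.__call__ dispatches to _do_forward, so a new operation is defined by subclassing Operation and implementing forward()/backward() over CTensors. The Double operation below is hypothetical, and the singa_wrap.__add__ helper is assumed to be available in the same way the built-in ops in autograd.py use it.

from singa import tensor
from singa.autograd import Operation
from singa import singa_wrap as singa_api


class Double(Operation):
    # hypothetical op computing y = x + x, for illustration only

    def forward(self, x):
        # x arrives as a CTensor: _do_forward has already unwrapped x.data
        return singa_api.__add__(x, x)

    def backward(self, dy):
        # d(x + x)/dx = 2, so the input gradient is dy + dy
        return singa_api.__add__(dy, dy)


x = tensor.Tensor(shape=(2, 3))
x.gaussian(0.0, 1.0)

ys = Double()(x)  # __call__ -> _do_forward; always returns a tuple of Tensors
y = ys[0]
assert y.creator is not None  # creator=self links y back into the graph

Because _do_forward records self as the creator of every output, chaining such operations builds the graph that backward() later walks through each op's src list.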