train/compute/python/lib/pytorch/operator_impl.py [57:76]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def cleanup(self):
        # Drop references to the forward output and gradient, then force a GC pass.
        self.fwd_out = None
        self.grad_in = None
        gc.collect()

    def forward(self, *args, **kwargs):
        # Run the wrapped callable and keep its output for the backward pass.
        self.fwd_out = self.func(*args, **kwargs)
        return self.fwd_out

    def create_grad(self):
        # Build a gradient of ones matching the forward output's shape.
        if not self.fwd_out.is_leaf:
            self.grad_in = torch.ones_like(self.fwd_out)
        else:
            logger.debug(f"{self.constructor.__name__}: skipping create_grad() because the forward result is a leaf tensor.")

    def backward(self):
        # Backpropagate through the saved forward output using the prepared gradient.
        if not self.fwd_out.is_leaf:
            self.fwd_out.backward(self.grad_in)
        else:
            logger.debug(f"{self.constructor.__name__}: skipping backward() because the forward result is a leaf tensor.")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
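
For context, here is a minimal, self-contained sketch of how these methods are presumably driven for one measurement cycle. The MatMulOp wrapper and the driver sequence below are illustrative assumptions, not code from the benchmark; only the forward/create_grad/backward/cleanup interface is taken from the excerpt.

import gc

import torch


class MatMulOp:
    # Hypothetical operator wrapper mirroring the excerpt's interface.
    def __init__(self):
        self.func = torch.matmul
        self.constructor = torch.matmul  # used only for log messages in the original
        self.fwd_out = None
        self.grad_in = None

    def cleanup(self):
        self.fwd_out = None
        self.grad_in = None
        gc.collect()

    def forward(self, *args, **kwargs):
        self.fwd_out = self.func(*args, **kwargs)
        return self.fwd_out

    def create_grad(self):
        if not self.fwd_out.is_leaf:
            self.grad_in = torch.ones_like(self.fwd_out)

    def backward(self):
        if not self.fwd_out.is_leaf:
            self.fwd_out.backward(self.grad_in)


# One forward/backward cycle as a benchmark loop would presumably run it.
op = MatMulOp()
a = torch.randn(128, 128, requires_grad=True)
b = torch.randn(128, 128, requires_grad=True)
op.forward(a, b)   # output is non-leaf, so the backward path is taken
op.create_grad()   # grad_in = ones with the same shape as fwd_out
op.backward()      # populates a.grad and b.grad
op.cleanup()       # drop references so the tensors can be freed between iterations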



train/compute/python/lib/pytorch/operator_impl.py [99:118]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def cleanup(self):
        # Drop references to the forward output and gradient, then force a GC pass.
        self.fwd_out = None
        self.grad_in = None
        gc.collect()

    def forward(self, *args, **kwargs):
        # Run the wrapped callable and keep its output for the backward pass.
        self.fwd_out = self.func(*args, **kwargs)
        return self.fwd_out

    def create_grad(self):
        # Build a gradient of ones matching the forward output's shape.
        if not self.fwd_out.is_leaf:
            self.grad_in = torch.ones_like(self.fwd_out)
        else:
            logger.debug(f"{self.constructor.__name__}: skipping create_grad() because the forward result is a leaf tensor.")

    def backward(self):
        # Backpropagate through the saved forward output using the prepared gradient.
        if not self.fwd_out.is_leaf:
            self.fwd_out.backward(self.grad_in)
        else:
            logger.debug(f"{self.constructor.__name__}: skipping backward() because the forward result is a leaf tensor.")
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
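
The two regions above are verbatim duplicates, and both carry the same is_leaf guard. That guard matters because a wrapped callable can return a leaf tensor (for example, a pure tensor-creation op), and a leaf tensor has no autograd graph to backpropagate through. A short standalone illustration of the distinction (plain PyTorch, not benchmark code):

import torch

# A tensor-creation op returns a leaf tensor: backward() would fail, so the
# wrapper's create_grad()/backward() are skipped for such forward results.
leaf_out = torch.randn(4, 4)
print(leaf_out.is_leaf)        # True

# An op applied to a grad-requiring input returns a non-leaf tensor with a
# grad_fn, so backward(torch.ones_like(...)) is valid.
x = torch.randn(4, 4, requires_grad=True)
non_leaf_out = torch.relu(x)
print(non_leaf_out.is_leaf)    # False
non_leaf_out.backward(torch.ones_like(non_leaf_out))
print(x.grad is not None)      # True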



