def lazy_init()

in graphlearn_torch/python/data/graph.py [0:0]


  def lazy_init(self):
    # Build the underlying pywrap.Graph from the CSR topology on first access;
    # subsequent calls are no-ops.
    if self._graph is not None:
      return

    self._graph = pywrap.Graph()
    indptr = self.topo.indptr
    indices = self.topo.indices
    # Fall back to empty tensors when edge ids or edge weights are absent.
    if self.topo.edge_ids is not None:
      edge_ids = self.topo.edge_ids
    else:
      edge_ids = torch.empty(0)

    if self.topo.edge_weights is not None:
      edge_weights = self.topo.edge_weights
    else:
      edge_weights = torch.empty(0)

    if self.mode == 'CPU':
      # Host-memory graph; note that edge weights are only consumed by the
      # CPU backend below.
      self._graph.init_cpu_from_csr(indptr, indices, edge_ids, edge_weights)
    else:
      # GPU modes default to the current CUDA device if none was specified.
      if self.device is None:
        self.device = torch.cuda.current_device()

      if self.mode == 'CUDA':
        # 'CUDA': the CSR topology is placed in device memory and accessed
        # via DMA.
        self._graph.init_cuda_from_csr(
          indptr, indices, self.device, pywrap.GraphMode.DMA, edge_ids
        )
      elif self.mode == 'ZERO_COPY':
        # 'ZERO_COPY': the topology stays in pinned host memory and is read
        # by the GPU through zero-copy access.
        self._graph.init_cuda_from_csr(
          indptr, indices, self.device, pywrap.GraphMode.ZERO_COPY, edge_ids
        )
      else:
        raise ValueError(f"'{self.__class__.__name__}': "
                         f"invalid mode {self.mode}")