def shutdown()

in graphlearn_torch/python/distributed/dist_sampling_producer.py [0:0]


  def shutdown(self):
    r""" Shutdown sampler event loop and rpc server. Join the subprocesses.

    Idempotent: subsequent calls after the first are no-ops. Sends a STOP
    command to every worker's task queue, waits a bounded time for each
    worker to exit gracefully, then force-terminates (and reaps) any worker
    that is still alive.
    """
    if self._shutdown:
      return
    self._shutdown = True
    try:
      # Ask each sampling worker to stop its event loop.
      for q in self._task_queues:
        q.put((MpCommand.STOP, None))
      # Give every worker a bounded window to exit on its own.
      for w in self._workers:
        w.join(timeout=MP_STATUS_CHECK_INTERVAL)
      # Release queue feeder threads and close the queues.
      for q in self._task_queues:
        q.cancel_join_thread()
        q.close()
    finally:
      for w in self._workers:
        if w.is_alive():
          w.terminate()
          # Join after terminate to reap the process and avoid zombies.
          w.join()