in graphlearn_torch/python/distributed/dist_loader.py
def shutdown(self):
  # Idempotent: a second call is a no-op.
  if self._shutdowned:
    return
  if self._is_collocated_worker:
    # Collocated mode: the sampling producer runs inside this process.
    self._collocated_producer.shutdown()
  elif self._is_mp_worker:
    # Multiprocessing mode: stop the local worker-pool producer.
    self._mp_producer.shutdown()
  elif rpc_is_initialized():
    # Server-client mode: ask each server to destroy the sampling
    # producer this loader registered with it. If RPC has already
    # been torn down, the remote producers cannot be reached and
    # are left for the servers to clean up.
    for server_rank, producer_id in zip(self._server_rank_list,
                                        self._producer_id_list):
      request_server(
        server_rank,
        DistServer.destroy_sampling_producer,
        producer_id,
      )
  self._shutdowned = True
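
For context, a minimal usage sketch (not part of the source file): it assumes a loader instance exposing the shutdown() method above and elides construction. Because of the _shutdowned guard, calling shutdown() more than once is safe, so wrapping iteration in try/finally is a natural pattern for releasing producers even when training raises.

# Hypothetical usage sketch; make_loader stands in for whatever
# constructs the distributed loader in the application and is not
# a real graphlearn_torch API.
def consume(make_loader):
  loader = make_loader()
  try:
    for batch in loader:
      ...  # train/evaluate on the sampled batch
  finally:
    # Safe even if shutdown() already ran: the _shutdowned flag
    # makes the second call return immediately.
    loader.shutdown()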