def run()

in sample_workloads/pingpong/pingpong.py [0:0]

Rank 0 sends a one-element GPU tensor to every other rank; every other rank blocks on a matching receive from rank 0. This exercises point-to-point send/recv over an already-initialized process group.

import torch
import torch.distributed as dist


def run(local_rank):
    # global_rank identifies this process across all nodes; local_rank
    # selects the GPU on this node.
    global_rank = dist.get_rank()
    tensor = torch.zeros(1).cuda(local_rank)

    if global_rank == 0:
        # Rank 0 pings every other rank with the tensor, one at a time.
        for rank_recv in range(1, dist.get_world_size()):
            dist.send(tensor=tensor, dst=rank_recv)
            print("Rank {} sent data to Rank {}\n".format(0, rank_recv))
    else:
        # Every other rank blocks until the tensor arrives from rank 0.
        dist.recv(tensor=tensor, src=0)
        print("Rank {} has received data from Rank {}\n".format(global_rank, 0))