sample_workloads/lit-gpt-demo/utilities/monitor_collectives.py
import torch.distributed

# Note: _SHOULD_PRINT is a module-level flag defined elsewhere in this file.
def _should_rank_print(group=None, peer_rank=None, root_rank=None):
    """Return True if the calling rank should log the current collective."""
    if not _SHOULD_PRINT:
        return False
    # Choose the "leader" rank whose output represents the whole collective.
    if root_rank is not None:
        leader = root_rank
    elif group is not None:
        # Translate the group's rank 0 into its global rank.
        leader = torch.distributed.get_global_rank(group, 0)
    else:
        leader = 0
    # Point-to-point ops (peer_rank given) are logged by every calling rank;
    # collectives are logged only by the leader.
    return (peer_rank is not None) or torch.distributed.get_rank() == leader
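
For orientation, here is a minimal usage sketch, assuming an initialized torch.distributed process group; the wrapper name _log_all_reduce and its log message are illustrative assumptions, not part of this file.

def _log_all_reduce(tensor, group=None):
    # Hypothetical wrapper: gate monitoring output with _should_rank_print so
    # only the leader rank of the group logs this collective.
    if _should_rank_print(group=group):
        print(f"[rank {torch.distributed.get_rank()}] all_reduce "
              f"numel={tensor.numel()} dtype={tensor.dtype}")
    torch.distributed.all_reduce(tensor, group=group)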