in graphlearn_torch/python/distributed/dist_client.py [0:0]
def shutdown_client():
  r""" Shutdown the client on the current process, notify all servers to
  exit, and destroy all connections.
  """
  current_context = get_context()
  if current_context is None:
    logging.warning("'shutdown_client': trying to shut down a client, but the "
                    "current process has not been initialized as a client.")
    return
  if not current_context.is_client():
    raise RuntimeError(f"'shutdown_client': role type of the current process "
                       f"context is not a client, got {current_context.role}.")
  # step 1: synchronize with all other clients.
  barrier()
  # step 2: once every client has passed the barrier, client-0 notifies all
  # servers to exit.
  if current_context.rank == 0:
    for server_rank in range(current_context.num_servers()):
      exit_status = request_server(server_rank, DistServer.exit)
      assert exit_status is True, f"Failed to exit server {server_rank}"
  # step 3: shut down RPC across all servers and clients.
  shutdown_rpc()
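
For context, a client process typically pairs this call with the module's init_client(...) entry point: initialize the client context, run the distributed workload, then call shutdown_client() so that client-0 can tell every server to exit before RPC is torn down. A minimal lifecycle sketch follows; the import path and the init_client parameter names shown here are assumptions for illustration, not taken verbatim from this file.

# Hedged usage sketch of the client lifecycle (parameter names are assumed).
from graphlearn_torch.distributed import init_client, shutdown_client

init_client(
  num_servers=2,            # assumed: number of server processes in the deployment
  num_clients=4,            # assumed: total number of client processes
  client_rank=0,            # assumed: rank of this client process
  master_addr='127.0.0.1',  # assumed: address used to rendezvous with servers
  master_port=11234,
)
# ... run distributed sampling / training as a client ...
shutdown_client()           # barrier with the other clients, exit servers, shut down RPC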