def _set_distributed_environment(training_env)

in src/sagemaker_pytorch_container/training.py


def _set_distributed_environment(training_env):
    """Set environment variable for distributed training.

    Args:
        hosts: list of hosts that are used for training.
    """
    # According to https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-training-algo.html
    # hosts are sorted lexicographically.
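    # MASTER_PORT is a module-level constant defined elsewhere in training.py.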
    os.environ['MASTER_ADDR'] = training_env.master_hostname
    os.environ['MASTER_PORT'] = MASTER_PORT
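
These two variables are the rendezvous settings read by PyTorch's env:// initialization. A minimal sketch of how a worker process might consume them once the container has exported them; the gloo backend and the RANK/WORLD_SIZE lookups are illustrative assumptions, not taken from this file:

import os

import torch.distributed as dist

# Illustrative sketch: assumes MASTER_ADDR and MASTER_PORT were exported by
# _set_distributed_environment before the worker starts. The RANK and
# WORLD_SIZE defaults are placeholders; the real container derives these
# values from the training environment.
dist.init_process_group(
    backend='gloo',        # assumption; 'nccl' would be typical on GPU hosts
    init_method='env://',  # reads MASTER_ADDR and MASTER_PORT
    rank=int(os.environ.get('RANK', '0')),
    world_size=int(os.environ.get('WORLD_SIZE', '1')),
)

With init_method='env://', init_process_group blocks until world_size processes have rendezvoused at MASTER_ADDR:MASTER_PORT, which is why the container must set both variables on every host before launching the training script.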