stylegan2_ada_pytorch/calc_metrics.py [37:57]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        init_file = os.path.abspath(os.path.join(temp_dir, ".torch_distributed_init"))
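        # Shared-file rendezvous for torch.distributed: gloo on Windows (NCCL is
        # not supported there), NCCL elsewhere.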
        if os.name == "nt":
            init_method = "file:///" + init_file.replace("\\", "/")
            torch.distributed.init_process_group(
                backend="gloo",
                init_method=init_method,
                rank=rank,
                world_size=args.num_gpus,
            )
        else:
            init_method = f"file://{init_file}"
            torch.distributed.init_process_group(
                backend="nccl",
                init_method=init_method,
                rank=rank,
                world_size=args.num_gpus,
            )

    # Init torch_utils.
    sync_device = torch.device("cuda", rank) if args.num_gpus > 1 else None
    training_stats.init_multiprocessing(rank=rank, sync_device=sync_device)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



stylegan2_ada_pytorch/train.py [617:636]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        init_file = os.path.abspath(os.path.join(temp_dir, ".torch_distributed_init"))
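        # Shared-file rendezvous for torch.distributed: gloo on Windows (NCCL is
        # not supported there), NCCL elsewhere.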
        if os.name == "nt":
            init_method = "file:///" + init_file.replace("\\", "/")
            torch.distributed.init_process_group(
                backend="gloo",
                init_method=init_method,
                rank=rank,
                world_size=args.num_gpus,
            )
        else:
            init_method = f"file://{init_file}"
            torch.distributed.init_process_group(
                backend="nccl",
                init_method=init_method,
                rank=rank,
                world_size=args.num_gpus,
            )

    # Init torch_utils.
    sync_device = torch.device("cuda", rank) if args.num_gpus > 1 else None
    training_stats.init_multiprocessing(rank=rank, sync_device=sync_device)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
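
Both excerpts initialize torch.distributed and training_stats identically, so the span is a candidate for a single shared helper. The following is a minimal sketch of such a refactor; the module name dist_utils.py and the function init_distributed are hypothetical and not part of the repository:

# dist_utils.py -- hypothetical shared helper, not part of stylegan2-ada-pytorch.
import os

import torch

from torch_utils import training_stats


def init_distributed(rank, num_gpus, temp_dir):
    """Init torch.distributed (multi-GPU only) and torch_utils.training_stats."""
    if num_gpus > 1:
        init_file = os.path.abspath(os.path.join(temp_dir, ".torch_distributed_init"))
        if os.name == "nt":
            # Windows: NCCL is unavailable, so fall back to gloo and build a
            # file:/// URL with forward slashes.
            backend = "gloo"
            init_method = "file:///" + init_file.replace("\\", "/")
        else:
            backend = "nccl"
            init_method = f"file://{init_file}"
        torch.distributed.init_process_group(
            backend=backend,
            init_method=init_method,
            rank=rank,
            world_size=num_gpus,
        )

    # Init torch_utils.
    sync_device = torch.device("cuda", rank) if num_gpus > 1 else None
    training_stats.init_multiprocessing(rank=rank, sync_device=sync_device)

Each file's per-process entry point could then replace the duplicated span with a single call such as init_distributed(rank, args.num_gpus, temp_dir).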



