# _get_train_data_loader()
# Source: distributed_training/src_dir/main_trainer.py

def _get_train_data_loader(args, **kwargs):
    """Build the training ``DataLoader`` and (optional) distributed sampler.

    Args:
        args: Parsed config/CLI namespace. Fields read here: ``height``,
            ``width``, ``data_dir``, ``model_parallel``,
            ``multigpus_distributed``, ``world_size``, ``rank``,
            ``batch_size``.
        **kwargs: Forwarded verbatim to ``data.DataLoader`` (e.g.
            ``num_workers``, ``pin_memory``).

    Returns:
        tuple: ``(train_dataloader, train_sampler)`` where ``train_sampler``
        is ``None`` unless ``args.multigpus_distributed`` is true.
    """
    # Albumentations augmentation pipeline. Normalize uses the standard
    # ImageNet channel statistics.
    transform = Compose(
        [
            RandomResizedCrop(args.height, args.width),
            GaussNoise(p=0.2),
            VerticalFlip(p=0.5),
            OneOf(
                [
                    MotionBlur(p=0.2),
                    MedianBlur(blur_limit=3, p=0.1),
                    Blur(blur_limit=3, p=0.1),
                ],
                p=0.2,
            ),
            OneOf(
                [
                    CLAHE(clip_limit=2),
                    Sharpen(),
                    Emboss(),
                    RandomBrightnessContrast(),
                ],
                p=0.3,
            ),
            HueSaturationValue(p=0.3),
            Normalize(
                mean=[0.485, 0.456, 0.406],
                std=[0.229, 0.224, 0.225],
            ),
        ],
        p=1.0,
    )

    dataset = AlbumentationImageDataset(
        image_path=os.path.join(args.data_dir, 'train'),
        transform=transform,
        args=args,
    )

    # Model-parallel execution needs every rank to see identically sized
    # batches, so drop the ragged final batch in that mode.
    drop_last = args.model_parallel

    # Shard the dataset across ranks for distributed data-parallel runs;
    # otherwise no sampler, and the DataLoader shuffles on its own.
    train_sampler = (
        data.distributed.DistributedSampler(
            dataset,
            num_replicas=int(args.world_size),
            rank=int(args.rank),
        )
        if args.multigpus_distributed
        else None
    )
    train_dataloader = data.DataLoader(
        dataset,
        batch_size=args.batch_size,
        # `sampler` and `shuffle=True` are mutually exclusive in PyTorch,
        # so only shuffle when no distributed sampler is in play.
        shuffle=train_sampler is None,
        sampler=train_sampler,
        drop_last=drop_last,
        **kwargs,
    )
    return train_dataloader, train_sampler