in utils/util.py [0:0]
def parse_option(args=None):
    """Parse command-line options for self-supervised video training.

    Args:
        args: Optional list of argument strings. When ``None`` (the default),
            arguments are read from ``sys.argv`` as before, so existing
            callers are unaffected; passing an explicit list makes the
            parser usable from tests and other scripts.

    Returns:
        argparse.Namespace with all parsed options.
    """
    parser = argparse.ArgumentParser('training')

    # dataset
    parser.add_argument('--data_dir', type=str, required=True, help='root directory of dataset')
    parser.add_argument('--dataset', type=str, default='KineticsClipFolderDatasetOrderTSN', help='dataset for training')
    parser.add_argument('--datasplit', type=str, default='train')
    parser.add_argument('--crop', type=float, default=0.2, help='minimum crop')
    parser.add_argument('--cropsize', type=int, default=224)
    parser.add_argument('--batch_size', type=int, default=128, help='batch_size')
    parser.add_argument('--num_workers', type=int, default=8, help='num of workers to use')

    # model
    parser.add_argument('--model_mlp', action='store_true', default=False)

    # loss function (NCE / momentum-contrast style hyperparameters)
    parser.add_argument('--alpha', type=float, default=0.999, help='exponential moving average weight')
    parser.add_argument('--nce_k', type=int, default=131072, help='number of negative samples')
    parser.add_argument('--nce_t', type=float, default=0.10, help='NCE temperature')
    parser.add_argument('--nce_t_intra', type=float, default=0.10, help='NCE temperature')

    # optimization
    parser.add_argument('--base_lr', type=float, default=0.1,
                        help='base learning when batch size = 256. final lr is determined by linear scale')
    parser.add_argument('--lr_scheduler', type=str, default='cosine',
                        choices=["cosine"], help="learning rate scheduler")
    parser.add_argument('--warmup_epoch', type=int, default=5, help='warmup epoch')
    parser.add_argument('--warmup_multiplier', type=int, default=100, help='warmup multiplier')
    # nargs='+' with a list default: used only by a step scheduler (currently
    # unreachable since --lr_scheduler only allows 'cosine', but kept for parity)
    parser.add_argument('--lr_decay_epochs', type=int, default=[120, 160, 200], nargs='+',
                        help='for step scheduler. where to decay lr, can be a list')
    parser.add_argument('--lr_decay_rate', type=float, default=0.1,
                        help='for step scheduler. decay rate for learning rate')
    parser.add_argument('--weight_decay', type=float, default=1e-4, help='weight decay')
    parser.add_argument('--momentum', type=float, default=0.9, help='momentum for SGD')
    parser.add_argument('--epochs', type=int, default=400, help='number of training epochs')
    parser.add_argument('--start_epoch', type=int, default=1, help='used for resume')

    # io
    parser.add_argument('--resume', default='', type=str, metavar='PATH',
                        help='path to latest checkpoint (default: none)')
    parser.add_argument('--pretrained_model', default='', type=str, metavar='PATH',
                        help='path to pretrained weights like imagenet (default: none)')
    parser.add_argument('--print_freq', type=int, default=10, help='print frequency')
    parser.add_argument('--save_freq', type=int, default=10, help='save frequency')
    parser.add_argument('--output_dir', type=str, default='./output', help='output directory')

    # misc
    parser.add_argument("--local_rank", type=int, help='local rank for DistributedDataParallel')
    parser.add_argument("--broadcast_buffer", action='store_true', default=False, help='broadcast_buffer for DistributedDataParallel')
    parser.add_argument("--rng_seed", type=int, default=-1, help='manual seed')

    # args=None preserves the original behavior of reading sys.argv
    return parser.parse_args(args)