in distributed_training/train_pytorch_smdataparallel_maskrcnn.py [0:0]
def mlperf_test_early_exit(iteration, iters_per_epoch, tester, model, distributed, min_bbox_map, min_segm_map):
    """Evaluate at each epoch boundary and report whether training can stop.

    At every epoch boundary (``iteration`` is a positive multiple of
    ``iters_per_epoch``) the model is evaluated via ``test_and_exchange_map``
    and the resulting bbox/segm mAP values are compared against the targets.

    Args:
        iteration: Current global training iteration.
        iters_per_epoch: Number of training iterations per epoch.
        tester: Evaluation harness forwarded to ``test_and_exchange_map``.
        model: Model under training; restored to train mode after evaluation.
        distributed: Distributed-training flag forwarded to the evaluator.
        min_bbox_map: Target bbox mAP required to stop early.
        min_segm_map: Target segm mAP required to stop early.

    Returns:
        bool: True when both mAP targets are reached (caller should stop
        training), False otherwise.
    """
    # Guard clause: only evaluate at epoch boundaries; iteration 0 is skipped.
    if iteration <= 0 or iteration % iters_per_epoch != 0:
        return False

    logger = logging.getLogger('maskrcnn_benchmark.trainer')
    logger.info("Starting evaluation...")
    bbox_map, segm_map = test_and_exchange_map(tester, model, distributed)
    # necessary for correctness: evaluation flips the model into eval mode,
    # so restore train mode before the next training iteration.
    model.train()
    # Lazy %-style args: formatting is skipped when the log level is disabled.
    logger.info('bbox mAP: %s, segm mAP: %s', bbox_map, segm_map)

    # Terminating condition: both targets met -> signal early exit.
    if bbox_map >= min_bbox_map and segm_map >= min_segm_map:
        logger.info("Target mAP reached, exiting...")
        return True
    return False