# tools/evaluate_pq_for_semantic_segmentation.py — entry point: main()

def main():
    """Evaluate PQ (panoptic quality) and mIoU of semantic-segmentation predictions.

    Reads a JSON file of per-region predictions (each entry carries
    ``file_name``, ``category_id`` and an RLE ``segmentation``), rasterizes
    them into per-image label maps, and compares against the dataset's
    ground-truth annotation images.

    Command-line args (via ``default_argument_parser``):
        json_file:    path to the predictions JSON.
        dataset_name: registered detectron2 dataset; one of
                      ade20k_sem_seg_val, coco_2017_test_stuff_10k_sem_seg,
                      ade20k_full_sem_seg_val.

    Prints a PQ/SQ/RQ table and the mIoU; returns None.

    Raises:
        ValueError: for an unsupported ``dataset_name``.
    """
    parser = default_argument_parser()
    args = parser.parse_args()

    _root = os.getenv("DETECTRON2_DATASETS", "datasets")
    json_file = args.json_file

    with open(json_file) as f:
        predictions = json.load(f)

    # Group predictions by image id (basename of file_name without extension).
    imgToAnns = defaultdict(list)
    for pred in predictions:
        image_id = os.path.basename(pred["file_name"]).split(".")[0]
        imgToAnns[image_id].append(
            {"category_id": pred["category_id"], "segmentation": pred["segmentation"]}
        )

    image_ids = list(imgToAnns.keys())

    meta = MetadataCatalog.get(args.dataset_name)
    class_names = meta.stuff_classes
    num_classes = len(meta.stuff_classes)
    ignore_label = meta.ignore_label
    # Extra row/column accumulates pixels labeled with the ignore value.
    conf_matrix = np.zeros((num_classes + 1, num_classes + 1), dtype=np.int64)

    # All classes are treated as "stuff" for semantic segmentation.
    categories = {
        i: {"id": i, "name": class_names[i], "isthing": 0} for i in range(num_classes)
    }

    # Ground-truth location is loop-invariant: resolve it once per run.
    # Maps dataset_name -> (path components under DETECTRON2_DATASETS, gt extension).
    gt_specs = {
        "ade20k_sem_seg_val": (
            ("ADEChallengeData2016", "annotations_detectron2", "validation"),
            ".png",
        ),
        "coco_2017_test_stuff_10k_sem_seg": (
            ("coco", "coco_stuff_10k", "annotations_detectron2", "test"),
            ".png",
        ),
        "ade20k_full_sem_seg_val": (
            ("ADE20K_2021_17_01", "annotations_detectron2", "validation"),
            ".tif",
        ),
    }
    if args.dataset_name not in gt_specs:
        raise ValueError(f"Unsupported dataset {args.dataset_name}")
    gt_parts, gt_ext = gt_specs[args.dataset_name]
    gt_dir = os.path.join(_root, *gt_parts)

    pq_stat = PQStat()

    for image_id in tqdm(image_ids):
        segm_gt = (
            read_image(os.path.join(gt_dir, image_id + gt_ext)).copy().astype(np.int64)
        )

        # Rasterize predictions into a label map the same size as the GT.
        segm_dt = np.zeros_like(segm_gt)
        anns = imgToAnns[image_id]
        for ann in anns:
            # Map dataset category ids back to contiguous training ids.
            if hasattr(meta, "stuff_dataset_id_to_contiguous_id"):
                if ann["category_id"] not in meta.stuff_dataset_id_to_contiguous_id:
                    # BUGFIX: the original left `category_id` unset here, so the
                    # mask was painted with a stale id from a previous annotation
                    # (or raised NameError). Skip unmapped categories instead.
                    continue
                category_id = meta.stuff_dataset_id_to_contiguous_id[ann["category_id"]]
            else:
                category_id = ann["category_id"]
            mask = maskUtils.decode(ann["segmentation"])
            segm_dt[mask > 0] = category_id

        # mIoU: accumulate the confusion matrix (ignore pixels go to the
        # extra last bin).
        gt = segm_gt.copy()
        pred = segm_dt.copy()
        gt[gt == ignore_label] = num_classes
        conf_matrix += np.bincount(
            (num_classes + 1) * pred.reshape(-1) + gt.reshape(-1),
            minlength=conf_matrix.size,
        ).reshape(conf_matrix.shape)

        # PQ: accumulate per-image stats.
        pq_stat_single = pq_compute_single_image(segm_gt, segm_dt, categories, meta.ignore_label)
        pq_stat += pq_stat_single

    metrics = [("All", None), ("Stuff", False)]
    results = {}
    for name, isthing in metrics:
        results[name], per_class_results = pq_stat.pq_average(categories, isthing=isthing)
        if name == 'All':
            results['per_class'] = per_class_results
    print("{:10s}| {:>5s}  {:>5s}  {:>5s} {:>5s}".format("", "PQ", "SQ", "RQ", "N"))
    print("-" * (10 + 7 * 4))

    for name, _isthing in metrics:
        print("{:10s}| {:5.1f}  {:5.1f}  {:5.1f} {:5d}".format(
            name,
            100 * results[name]['pq'],
            100 * results[name]['sq'],
            100 * results[name]['rq'],
            results[name]['n'])
        )

    # Calculate mIoU from the confusion matrix (last row/col = ignore bin).
    acc = np.full(num_classes, np.nan, dtype=np.float64)
    iou = np.full(num_classes, np.nan, dtype=np.float64)
    tp = conf_matrix.diagonal()[:-1].astype(np.float64)
    pos_gt = np.sum(conf_matrix[:-1, :-1], axis=0).astype(np.float64)
    pos_pred = np.sum(conf_matrix[:-1, :-1], axis=1).astype(np.float64)
    acc_valid = pos_gt > 0
    acc[acc_valid] = tp[acc_valid] / pos_gt[acc_valid]
    iou_valid = (pos_gt + pos_pred) > 0
    union = pos_gt + pos_pred - tp
    # BUGFIX: the original indexed with `acc_valid` (gt pixels present) while
    # dividing by the count of `iou_valid` (gt OR pred pixels present); the
    # mismatch skews mIoU whenever a class is predicted but absent from the GT.
    # Use `iou_valid` consistently, matching detectron2's SemSegEvaluator.
    iou[iou_valid] = tp[iou_valid] / union[iou_valid]
    miou = np.sum(iou[iou_valid]) / np.sum(iou_valid)

    print("")
    print(f"mIoU: {miou}")