def _fetch_leaderboard()

in scripts/competitiond.py


    def _fetch_leaderboard(self):
        """
        Fetches the evaluation bundles tagged for the leaderboard, along with
        the corresponding submission bundles if they exist.

        :return: (eval_bundles, eval2submit) where eval_bundles is a dict mapping
                 evaluation bundle ids to the evaluation bundles themselves, and
                 eval2submit is a dict mapping evaluation bundle id to the
                 original submission bundle. The id will not be a key in
                 eval2submit if a corresponding submission bundle does not exist.
        """
        logger.debug("Fetching the leaderboard")
        # Fetch bundles on current leaderboard
        eval_bundles = self.client.fetch(
            "bundles",
            params={
                "keywords": [
                    ".mine",  # don't allow others to forge evaluations
                    "tags={evaluate[tag]}".format(**self.config),
                    ".limit={max_leaderboard_size}".format(**self.config),
                ]
            },
        )
        eval_bundles = {b["id"]: b for b in eval_bundles}

        # Build map from submission bundle id => eval bundle
        submit2eval = {}
        # Iterate over a snapshot, since hidden entries are deleted mid-loop
        for eval_id, eval_bundle in list(eval_bundles.items()):
            meta = self._get_competition_metadata(eval_bundle)
            # Eval bundles that are missing competition metadata are simply
            # skipped; code downstream must handle the case where eval2submit
            # does not contain an entry for a given eval bundle
            if meta is not None:
                # Allow manual hiding
                if meta.get("hide", False):
                    del eval_bundles[eval_id]
                else:
                    submit2eval[meta["submit_id"]] = eval_bundle

        # Fetch the original submission bundles.
        # A NotFoundError will be thrown if a bundle no longer exists.
        # We will remove that submission from the leaderboard, and keep
        # trying until there are no more deleted bundles.
        logger.debug("Fetching corresponding original submission bundles")
        while True:
            if len(eval_bundles) == 0:
                submit_bundles = []
                break
            try:
                uuids = list(submit2eval.keys())
                submit_bundles = []
                # Fetch the submission bundles 50 uuids at a time
                for start in range(0, len(uuids), 50):
                    end = start + 50
                    submit_bundles.extend(
                        self.client.fetch(
                            "bundles",
                            params={
                                "specs": uuids[start:end],
                                "worksheet": self.config["log_worksheet_uuid"],
                                "include": ["owner", "group_permissions"],
                            },
                        )
                    )
                break
            except NotFoundError as e:
                # The error message contains the uuid of the missing bundle
                missing_submit_uuid = re.search(UUID_STR, str(e)).group(0)
                eval_uuid = submit2eval[missing_submit_uuid]["id"]

                # If a submission bundle (missing_submit_uuid) has been deleted...
                if self.config["allow_orphans"]:
                    # Just clear the competition metadata on the eval bundle,
                    # thus removing the reference to the original submit bundle
                    logger.info(
                        "Clearing reference to deleted submission %s", missing_submit_uuid,
                    )
                    self._clear_competition_metadata(eval_bundles[eval_uuid])
                else:
                    # Untag and remove entry from the leaderboard entirely
                    logger.info("Removing submission %s", missing_submit_uuid)
                    self._untag(
                        [submit2eval[missing_submit_uuid]], self.config["evaluate"]["tag"],
                    )
                    del eval_bundles[eval_uuid]

                # Drop from list of submit bundles and try fetching batch again
                del submit2eval[missing_submit_uuid]
                continue

        # Build map from eval bundle id => submission bundle
        eval2submit = {}
        for submit_bundle in submit_bundles:
            eval_bundle = submit2eval[submit_bundle["id"]]
            eval2submit[eval_bundle["id"]] = submit_bundle

        return eval_bundles, eval2submit
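
A minimal usage sketch (not from the source) of how another method on the same
class might consume the return value. The row fields and the "submitter" key
below are illustrative assumptions; the one real contract is that eval2submit
may lack an entry for an eval bundle whose submission bundle was deleted, so
lookups must use .get():

    eval_bundles, eval2submit = self._fetch_leaderboard()
    rows = []
    for eval_id, eval_bundle in eval_bundles.items():
        # Orphaned eval bundles (deleted submissions) have no eval2submit entry
        submit_bundle = eval2submit.get(eval_id)
        rows.append({
            "eval_uuid": eval_id,
            # "owner" is requested via the "include" param above, but treating
            # its presence as optional here is a defensive assumption
            "submitter": submit_bundle.get("owner") if submit_bundle else None,
        })

Returning two dicts keyed by evaluation bundle id keeps orphaned evaluations on
the leaderboard (when allow_orphans is set) while still letting callers detect
the missing submission explicitly.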