# get_bugs — excerpt from bugbot/rules/duplicate_copy_metadata.py


    def get_bugs(self, date="today", bug_ids=None, chunk_size=None):
        """Return open original bugs that should inherit metadata from duplicates.

        First fetches the duplicate bugs via the superclass query, then fetches
        the bugs they were duped to ("original" bugs) and works out which
        fields can be copied from the duplicates onto each original bug:
        performance impact, the `access` keyword, accessibility severity,
        webcompat priority, confirmation status, and `regressed_by`.

        Args:
            date: the date window passed through to the superclass query.
            bug_ids: optional explicit bug ids to restrict the query to.
            chunk_size: optional chunk size passed through to the superclass.

        Returns:
            dict keyed by original bug id, each value holding the bug id,
            summary, and a sorted list of (field, value, source_bug_ids)
            tuples describing what to copy.
        """
        # Avoid a shared mutable default argument; the superclass still
        # receives a list, as before.
        if bug_ids is None:
            bug_ids = []
        dup_bugs = super().get_bugs(date, bug_ids, chunk_size)

        original_bug_ids = {bug["dupe_of"] for bug in dup_bugs.values()}
        original_bugs = {}

        def bughandler(bug, data):
            # Index each fetched bug by its string id in the shared dict.
            # (Must be a subscript store, not a rebinding of `data`.)
            data[str(bug["id"])] = bug

        Bugzilla(
            original_bug_ids,
            include_fields=[
                "id",
                "summary",
                "keywords",
                "duplicates",
                "cf_accessibility_severity",
                "cf_performance_impact",
                "comments",
                "history",
                "status",
                "regressed_by",
                "is_open",
                "cf_webcompat_priority",
                "groups",
            ],
            bughandler=bughandler,
            bugdata=original_bugs,
        ).wait()

        results = {}
        for bug_id, bug in original_bugs.items():
            # Closed bugs do not need metadata copied over.
            if not bug["is_open"]:
                continue

            # A bug with no groups is publicly visible.
            is_public = not bug["groups"]

            copied_fields = {}
            for dup_bug_id in bug["duplicates"]:
                dup_bug_id = str(dup_bug_id)
                dup_bug = dup_bugs.get(dup_bug_id)
                if not dup_bug:
                    continue

                if is_public and dup_bug["groups"]:
                    # We avoid copying fields from private to public bugs
                    continue

                # TODO: Since the logic for copied fields is getting bigger,
                # consider refactoring it in a separate method.

                # Performance Impact: copy the assessment result from duplicates
                if bug.get("cf_performance_impact") == "---" and dup_bug.get(
                    "cf_performance_impact"
                ) not in ("---", "?", None):
                    if "cf_performance_impact" not in copied_fields:
                        copied_fields["cf_performance_impact"] = {
                            "from": [dup_bug["id"]],
                            "value": dup_bug["cf_performance_impact"],
                        }
                    else:
                        copied_fields["cf_performance_impact"]["from"].append(
                            dup_bug["id"]
                        )

                # Keywords: copy the `access` keyword from duplicates
                if "access" not in bug["keywords"] and "access" in dup_bug["keywords"]:
                    if "keywords" not in copied_fields:
                        copied_fields["keywords"] = {
                            "from": [dup_bug["id"]],
                            "value": "access",
                        }
                    else:
                        copied_fields["keywords"]["from"].append(dup_bug["id"])

                # Accessibility severity: copy the rating from duplicates.
                # NOTE: the field is `cf_accessibility_severity` (as fetched in
                # include_fields above); guard against it being absent (None).
                if bug.get("cf_accessibility_severity") == "---" and dup_bug.get(
                    "cf_accessibility_severity"
                ) not in ("---", None):
                    new_access_severity = dup_bug["cf_accessibility_severity"]
                    assert new_access_severity in ("s1", "s2", "s3", "s4")

                    # Severities compare lexicographically: "s1" < "s2" < ...,
                    # so a smaller string means a more severe rating and wins.
                    if (
                        "cf_accessibility_severity" not in copied_fields
                        or new_access_severity
                        < copied_fields["cf_accessibility_severity"]["value"]
                    ):
                        copied_fields["cf_accessibility_severity"] = {
                            "from": [dup_bug["id"]],
                            "value": new_access_severity,
                        }
                    elif (
                        new_access_severity
                        == copied_fields["cf_accessibility_severity"]["value"]
                    ):
                        copied_fields["cf_accessibility_severity"]["from"].append(
                            dup_bug["id"]
                        )

                # Webcompat Priority: copy the `cf_webcompat_priority` from duplicates
                if (
                    bug.get("cf_webcompat_priority") == "---"
                    and dup_bug.get("cf_webcompat_priority")
                    in WebcompatPriority.NOT_EMPTY_VALUES
                ):
                    new_priority = dup_bug["cf_webcompat_priority"]

                    # Since the bug does not have a priority, it does not make
                    # sense to set it to `revisit`. Instead, we set it to `?` to
                    # request triage.
                    if new_priority == "revisit":
                        new_priority = "?"

                    # Keep the highest priority seen across all duplicates.
                    if (
                        "cf_webcompat_priority" not in copied_fields
                        or WebcompatPriority(new_priority)
                        > WebcompatPriority(
                            copied_fields["cf_webcompat_priority"]["value"]
                        )
                    ):
                        copied_fields["cf_webcompat_priority"] = {
                            "from": [dup_bug["id"]],
                            "value": new_priority,
                        }
                    elif (
                        new_priority == copied_fields["cf_webcompat_priority"]["value"]
                    ):
                        copied_fields["cf_webcompat_priority"]["from"].append(
                            dup_bug["id"]
                        )

                # Status: confirm the bug if the duplicate was confirmed
                if bug["status"] == "UNCONFIRMED" and self.was_confirmed(dup_bug):
                    if "status" not in copied_fields:
                        copied_fields["status"] = {
                            "from": [dup_bug["id"]],
                            "value": "NEW",
                        }
                    else:
                        copied_fields["status"]["from"].append(dup_bug["id"])

                # Regressed by: move the regressed_by field to the duplicate of
                if dup_bug["regressed_by"]:
                    added_regressed_by = self.get_previously_added_regressors(bug)
                    # Only keep regressors not already added before, and that
                    # predate this bug (a bug cannot be regressed by a newer one).
                    new_regressed_by = {
                        regression_bug_id
                        for regression_bug_id in dup_bug["regressed_by"]
                        if regression_bug_id not in added_regressed_by
                        and regression_bug_id < int(bug_id)
                    }
                    if new_regressed_by:
                        if "regressed_by" not in copied_fields:
                            copied_fields["regressed_by"] = {
                                "from": [dup_bug["id"]],
                                "value": new_regressed_by,
                            }
                        else:
                            copied_fields["regressed_by"]["from"].append(dup_bug["id"])
                            copied_fields["regressed_by"]["value"] |= new_regressed_by

            previously_copied_fields = self.get_previously_copied_fields(bug)
            # We do not need to ignore the `regressed_by` field because we
            # already check the history to avoid overwriting the engineers.
            previously_copied_fields.discard("regressed_by")
            copied_fields = sorted(
                (
                    field,
                    change["value"],
                    change["from"],
                )
                for field, change in copied_fields.items()
                if field not in previously_copied_fields
            )

            if copied_fields:
                results[bug_id] = {
                    "id": bug_id,
                    "summary": bug["summary"],
                    "copied_fields": copied_fields,
                }

                self.set_autofix(bug, copied_fields)

        return results