in backend/code_review_backend/issues/serializers.py [0:0]
def create(self, validated_data):
    """Bulk-create issues and their per-revision/diff links.

    Splits each incoming issue dict into Issue fields and IssueLink
    (M2M through-table) fields, creates both sets with
    ``bulk_create(ignore_conflicts=True)`` so pre-existing rows are
    reused, then rebuilds flat dicts for re-serialization.

    :param validated_data: serializer-validated payload with a
        ``diff_id`` (optional) and an ``issues`` list; each issue dict
        mixes Issue attributes with ``issue_links__*`` link attributes.
    :returns: dict with ``diff_id`` and the ``issues`` output list,
        each issue annotated with its link attributes and a computed
        ``publishable`` flag.
    """
    diff = validated_data.get("diff_id", None)
    link_attrs = defaultdict(list)
    # Separate attributes that are specific to the IssueLink M2M.
    # Popping them here leaves only genuine Issue fields behind for
    # the Issue(**values) construction below.
    for issue in validated_data["issues"]:
        link_attrs[issue["hash"]].append(
            {
                "new_for_revision": issue.pop(
                    "issue_links__new_for_revision", None
                ),
                "in_patch": issue.pop("issue_links__in_patch", None),
                "line": issue.pop("issue_links__line", None),
                "nb_lines": issue.pop("issue_links__nb_lines", None),
                # Fix: was popping "issue_links__path", which stored the
                # wrong value under "char" and left "issue_links__char"
                # behind to crash Issue(**values) as an unexpected kwarg.
                "char": issue.pop("issue_links__char", None),
            }
        )
    # Only create issues that do not exist yet
    Issue.objects.bulk_create(
        [Issue(**values) for values in validated_data["issues"]],
        ignore_conflicts=True,
    )
    # Retrieve issues to get existing IDs (bulk_create with
    # ignore_conflicts does not return PKs for skipped rows)
    hashes = set(link_attrs.keys())
    known_issues = {i.hash: i for i in Issue.objects.filter(hash__in=hashes)}
    assert set(known_issues.keys()) == hashes, "Failed to create all issues"
    # Create all links, using DB conflicts; return value intentionally
    # unused — output is rebuilt from link_attrs below
    IssueLink.objects.bulk_create(
        [
            IssueLink(
                issue_id=known_issues[issue_hash].id,
                diff=diff,
                revision=self.context["revision"],
                **link,
            )
            for issue_hash, links in link_attrs.items()
            for link in links
        ],
        ignore_conflicts=True,
    )
    # Endpoint expects Issue with specific attributes for re-serialization of links
    # TODO in treeherder: only expose hash & publishable in output
    output = []
    for issue_hash, links in link_attrs.items():
        for link in links:
            existing_issue = known_issues[issue_hash]
            # Set attributes for re-serialization: restore the
            # issue_links__ prefix, then overlay the Issue's own fields
            output_link = {f"issue_links__{k}": v for k, v in link.items()}
            output_link.update(vars(existing_issue))
            # An issue is publishable only when it sits in the patch
            # and is an actual error
            output_link["publishable"] = (
                link["in_patch"] and existing_issue.level == LEVEL_ERROR
            )
            output.append(output_link)
    return {
        "diff_id": diff,
        "issues": output,
    }