in bot/code_review_bot/workflow.py [0:0]
def run(self, revision):
"""
Find all issues on remote tasks and publish them
"""
        # Index the Taskcluster task for this revision as soon as possible
self.index(revision, state="started")
# Set the Phabricator build as running
self.update_status(revision, state=BuildState.Work)
if settings.taskcluster_url:
self.publish_link(
revision,
slug="publication",
name="Publication task",
url=settings.taskcluster_url,
)
# Analyze revision patch to get files/lines data
revision.analyze_patch()
# Find issues on remote tasks
issues, task_failures, notices, reviewers = self.find_issues(
revision, settings.try_group_id
)
        # Compare issues against a base revision when the before/after feature is enabled
if revision.before_after_feature:
logger.info("Running the before/after feature")
            # Look up the base revision changeset from the decision task
decision = self.queue_service.task(settings.try_group_id)
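            # GECKO_BASE_REV is read from the decision task environment and is
            # expected to reference the changeset the try push was based on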
base_rev_changeset = (
decision.get("payload", {}).get("env", {}).get("GECKO_BASE_REV")
)
if not base_rev_changeset:
logger.warning(
"Base revision changeset could not be fetched from Phabricator, "
"looking for existing issues based on the current date",
task=settings.try_group_id,
)
            # Clone the local repo when required,
            # as find_previous_issues will build the issue hashes
self.clone_repository(revision)
            # Mark known issues to avoid publishing them again on this patch
self.find_previous_issues(issues, base_rev_changeset)
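            # new_issue is a boolean, so the sum counts the issues flagged as new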
new_issues_count = sum(issue.new_issue for issue in issues)
logger.info(
f"Found {new_issues_count} new issues (over {len(issues)} total detected issues)",
task=settings.try_group_id,
)
else:
            # Clone the local repo when required,
            # as publication needs the hashes
self.clone_repository(revision)
if (
all(issue.new_issue is False for issue in issues)
and not task_failures
and not notices
):
logger.info("No issues nor notices, stopping there.")
# Publish all issues
self.publish(revision, issues, task_failures, notices, reviewers)
return issues
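
# A minimal usage sketch, for illustration only: the Workflow construction and
# the Revision factory below are assumptions, not the bot's actual entry point.
# Only run() and its return value come from the method above.
#
#     workflow = Workflow(...)                      # hypothetical wiring
#     revision = Revision.from_try_task(try_task)   # hypothetical factory
#     issues = workflow.run(revision)
#     new = sum(issue.new_issue for issue in issues)
#     print(f"{new} new issues out of {len(issues)} detected")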