async update()

in src/BuildHistoryDisplay.js [203:484]


  async update() {
    const currentTime = new Date();
    this.setState({ currentTime: currentTime });
    // NB: server-side slicing doesn't really help; Jenkins seems to
    // load everything into memory anyway
    let data;
    if (true) {
      // STOP.  You want more results?  You may have noticed that on
      // Google, people suggest using allBuilds with {0,n} to make use
      // of Jenkins pagination.  However, if you do this, it will *DoS
      // our Jenkins instance*; even when pagination is requested,
      // Jenkins will still load ALL builds into memory before servicing
      // your request.  I've filed this at https://issues.jenkins-ci.org/browse/JENKINS-49908
      data = await jenkins.job(this.props.job, {
        tree: `builds[
                  url,
                  number,
                  duration,
                  timestamp,
                  result,
                  actions[parameters[name,value],
                  causes[shortDescription]],
                  changeSet[items[commitId,comment,msg]],
                  subBuilds[
                    result,jobName,url,duration,
                    build[
                      subBuilds[
                        result,jobName,url,duration,
                        build[
                          subBuilds[result,jobName,url,duration]
                        ]
                      ]
                    ]
                  ]
               ]`.replace(/\s+/g, ""),
      });
      // build[builtOn]
    } else {
      // If you want "build" entries populated on subBuilds, you need depth = 3;
      // otherwise, most data can be fetched with depth = 1.
      const depth = 1;
      data = await jenkins.job(this.props.job, { depth: depth });
    }
    data.updateTime = new Date();
    data.connectedIn = data.updateTime - currentTime;
    if (data.allBuilds !== undefined) {
      data.builds = data.allBuilds;
    }

    // Get build statuses from GitHub for CircleCI jobs
    async function get_github_commit_statuses() {
      let github_commit_statuses = {};
      let requests = [];

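      // Each status document fetched below is assumed to map job names to
      // objects carrying (at least) "status" and "build_url" fields, which
      // is all add_jobs() reads out of it, roughly:
      //   { "<job name>": { "status": "...", "build_url": "..." }, ... }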
      function add_jobs(jobs, index) {
        let commitId = requests[index].commitId;
        if (jobs) {
          for (let job_name in jobs) {
            let job = jobs[job_name];
            if (!github_commit_statuses[commitId].hasOwnProperty(job_name)) {
              github_commit_statuses[commitId][job_name] = {
                duration: "0",
                result: job.status,
                url: job.build_url,
              };
            }
          }
        }
      }

      for (const commit of data.builds) {
        for (let i = 0; i < commit.changeSet.items.length; i++) {
          let commitId = commit.changeSet.items[i].commitId;
          if (!github_commit_statuses.hasOwnProperty(commitId)) {
            github_commit_statuses[commitId] = {};
          }
          requests.push({
            url:
              "https://s3.amazonaws.com/ossci-job-status/combined/" +
              commitId +
              ".json",
            commitId,
          });
        }
      }
      let results = await jenkins.batch_get(
        requests.map((request) => request.url)
      );
      results.forEach(add_jobs);
      return github_commit_statuses;
    }
    data.github_commit_statuses = await get_github_commit_statuses();

    const known_jobs_set = new Set();
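    // Recursively flatten subBuilds, descending into nested MultiJob builds,
    // and record every leaf job name seen across all builds; this feeds
    // data.known_jobs below.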
    function collect_known_jobs_set(topBuild) {
      function go(subBuild) {
        if (
          subBuild.build &&
          subBuild.build._class ===
            "com.tikal.jenkins.plugins.multijob.MultiJobBuild"
        ) {
          subBuild.build.subBuilds.forEach(go);
        } else {
          known_jobs_set.add(getJobName(subBuild));
        }
      }
      topBuild.subBuilds.forEach(go);
    }
    const props_mode = this.props.mode;
    if (props_mode !== "binary") {
      data.builds.forEach(collect_known_jobs_set);
    }

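    // Merge CircleCI job names (prefixed with "_") into the known jobs set.
    // In non-binary mode, keep binary/smoke jobs only when they run on every
    // PR, plus all other jobs; in binary mode, keep binary/smoke jobs that
    // are NOT in the per-PR list.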
    if (data.github_commit_statuses) {
      Object.keys(data.github_commit_statuses).forEach(function (commit) {
        var jobs = data.github_commit_statuses[commit];
        Object.keys(jobs).forEach(function (job_name) {
          if (props_mode !== "binary") {
            // Warning: quadratic police!
            for (var i = 0; i < binary_and_smoke_tests_on_pr.length; i++) {
              if (job_name.endsWith(binary_and_smoke_tests_on_pr[i])) {
                known_jobs_set.add("_" + job_name); // Add "_" before name to make sure CircleCI builds always show up on the left
                break;
              }
            }
            if (
              !(job_name.includes("binary_") || job_name.includes("smoke_"))
            ) {
              // Exclude binary builds and smoke tests that are not running on every PR
              known_jobs_set.add("_" + job_name); // Add "_" before name to make sure CircleCI builds always show up on the left
            }
          } else {
            if (job_name.includes("binary_") || job_name.includes("smoke_")) {
              let found = false;
              for (let i = 0; i < binary_and_smoke_tests_on_pr.length; i++) {
                if (job_name.endsWith(binary_and_smoke_tests_on_pr[i])) {
                  found = true;
                  break;
                }
              }
              if (!found) known_jobs_set.add("_" + job_name);
            }
          }
        });
      });
    }

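    // Sort so that jobs matching jobs_on_pr come first; ties are broken
    // lexicographically by job name.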
    function compareFun(x, y) {
      const sx = jobs_on_pr.some((e) => summarize_job(x).startsWith(e));
      const sy = jobs_on_pr.some((e) => summarize_job(y).startsWith(e));
      if (sx < sy) return 1;
      else if (sx > sy) return -1;
      else if (x < y) return -1;
      else if (x > y) return 1;
      else return 0;
    }
    data.known_jobs = [...known_jobs_set.values()].sort(compareFun);

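    // For every build, construct sb_map: job name -> status record, combining
    // Jenkins subBuilds with the GitHub/CircleCI statuses gathered above.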
    data.builds.forEach((build) => {
      const sb_map = new Map();

      // Collect job status from Jenkins
      function collect_jobs(topBuild) {
        function go(subBuild) {
          if (
            subBuild.build &&
            subBuild.build._class ===
              "com.tikal.jenkins.plugins.multijob.MultiJobBuild"
          ) {
            subBuild.build.subBuilds.forEach(go);
          } else {
            sb_map.set(getJobName(subBuild), subBuild);
          }
        }
        topBuild.subBuilds.forEach(go);
      }
      collect_jobs(build);

      // Collect job status for non-Jenkins jobs (i.e. CircleCI jobs)
      async function collect_jobs_from_github_status(build) {
        if (build.changeSet.items.length > 0) {
          for (var i = 0; i < build.changeSet.items.length; i++) {
            let commitId = build.changeSet.items[i].commitId;
            if (data.github_commit_statuses) {
              Object.keys(data.github_commit_statuses[commitId]).forEach(
                function (job_name) {
                  var job = data.github_commit_statuses[commitId][job_name];
                  sb_map.set("_" + job_name, {
                    duration: job.duration,
                    result: job.result,
                    url: job.url,
                  });
                }
              );
            }
          }
        }
      }
      collect_jobs_from_github_status(build);
      build.sb_map = sb_map;
    });

    // Figure out if we think something is broken or not.
    //  1. Consider the MOST RECENT finished build for any given sub
    //     build type.  If it is success, it's fine.
    //  2. Otherwise, check builds prior to it.  If the previous build
    //     also failed, we think it's broken!
    //
    // Special cases:
    //  - pytorch_doc_push: don't care about this
    //  - nightlies: these don't run all the time
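    // (Illustration: with the window below, if the two most recent builds
    // both report a given job as failed, that job lands in
    // consecutive_failure_count and may trigger a notification further down.)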

    const failure_window = 10;
    if (this.props.job.includes("master")) {
      const still_unknown_set = new Set();
      const consecutive_failure_count = new Map();
      data.known_jobs.forEach((job) => {
        if (job === "pytorch_doc_push") return;
        if (job === "__dr.ci") return;
        if (job.includes("nightlies")) return;
        still_unknown_set.add(job);
      });
      for (let i = 0; i < data.builds.length; i++) {
        // After some window, don't look anymore; the job may have been
        // removed
        if (i > failure_window) break;
        if (!still_unknown_set.size) break;
        const build = data.builds[i];
        const sb_map = build.sb_map;
        sb_map.forEach((sb, jobName) => {
          if (!still_unknown_set.has(jobName)) {
            // do nothing
          } else if (is_failure(sb.result)) {
            let count = consecutive_failure_count.get(jobName) || 0;
            count++;
            consecutive_failure_count.set(jobName, count);
          } else if (is_success(sb.result)) {
            still_unknown_set.delete(jobName);
          }
        });
      }

      // Prune uninteresting alarms
      consecutive_failure_count.forEach((v, k) => {
        // Require two consecutive failures to alert
        if (v <= 1) {
          consecutive_failure_count.delete(k);
        }
      });

      data.consecutive_failure_count = consecutive_failure_count;

      // Compute what notifications to show
      // We'll take a diff and then give notifications for keys that
      // changed
      if (this.state.consecutive_failure_count) {
        this.state.consecutive_failure_count.forEach((v, key) => {
          if (!consecutive_failure_count.has(key)) {
            // It's fixed!
            new Notification("✅ " + this.props.job, {
              body: summarize_job(key),
            });
          }
        });
      }
      consecutive_failure_count.forEach((v, key) => {
        // Don't produce notifications on the initial page load!
        if (
          this.state.consecutive_failure_count &&
          !this.state.consecutive_failure_count.has(key)
        ) {
          // It's failed!
          new Notification("❌ " + this.props.job, {
            body: summarize_job(key),
          });
        }
      });
    }

    // TODO: This can cause spurious state updates
    this.setState(data);
  }
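
For reference, the jenkins helper this function relies on is assumed to look
roughly like the sketch below. The interface (jenkins.job taking a tree/depth
query, jenkins.batch_get returning parsed JSON bodies in request order) is
inferred purely from the call sites above; the base URL and error handling are
hypothetical, not the module's actual implementation.

  // Sketch only: interface inferred from the call sites in update().
  const JENKINS_ROOT = "https://ci.pytorch.org"; // hypothetical base URL
  const jenkins = {
    // GET the Jenkins JSON API for a job, forwarding tree/depth options.
    async job(name, opts = {}) {
      const params = new URLSearchParams();
      if (opts.tree) params.set("tree", opts.tree);
      if (opts.depth) params.set("depth", String(opts.depth));
      const resp = await fetch(`${JENKINS_ROOT}/job/${name}/api/json?${params}`);
      return resp.json();
    },
    // Fetch a list of URLs in parallel; results come back in the same order
    // as the input, which add_jobs() relies on to recover the matching
    // commitId.  Missing documents resolve to null so callers can skip them.
    async batch_get(urls) {
      return Promise.all(
        urls.map(async (url) => {
          const resp = await fetch(url);
          return resp.ok ? resp.json() : null;
        })
      );
    },
  };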