# handle_json_profiles_aggr — defined in benchmark.py

def handle_json_profiles_aggr(bazel_commits, project_source, project_commits,
                              runs, output_prefix, output_path, data_directory):
  """Aggregates the collected JSON profiles and writes the result to a CSV.

  For every (bazel_commit, project_commit) pair, the JSON profiles from all
  runs are merged via json_profiles_merger_lib and one CSV row is written per
  aggregated event (phases only), carrying the event's median duration.

  Args:
    bazel_commits: the Bazel commits that bazel-bench ran on.
    project_source: a path/url to a local/remote repository of the project on
      which benchmarking was performed.
    project_commits: the commits of the project when benchmarking was done.
    runs: the total number of runs.
    output_prefix: the prefix to json profile filenames. Often the
      bazel-bench-uid.
    output_path: the path to the output csv file.
    data_directory: the directory that stores output files.
  """
  output_dir = os.path.dirname(output_path)
  # dirname is '' when output_path is a bare filename; makedirs('') would
  # raise. exist_ok avoids the race between an exists() check and makedirs().
  if output_dir:
    os.makedirs(output_dir, exist_ok=True)

  # newline='' is required by the csv module; without it the writer emits
  # blank interleaved rows on Windows.
  with open(output_path, 'w', newline='') as f:
    csv_writer = csv.writer(f)
    csv_writer.writerow([
        'bazel_source', 'project_source', 'project_commit', 'cat', 'name', 'dur'
    ])

    for bazel_commit in bazel_commits:
      for project_commit in project_commits:
        # One profile file per run; run indices are 1-based.
        profiles_filenames = [
            json_profile_filename(data_directory, output_prefix, bazel_commit,
                                  project_commit, i, runs)
            for i in range(1, runs + 1)
        ]
        event_list = json_profiles_merger_lib.aggregate_data(
            profiles_filenames, only_phases=True)
        for event in event_list:
          csv_writer.writerow([
              bazel_commit, project_source, project_commit, event['cat'],
              event['name'], event['median']
          ])
  logger.log('Finished writing aggregate_json_profiles to %s' % output_path)