in report/generate_report.py [0:0]
def _generate_report_for_date(project, date, storage_bucket, report_name,
                              upload_report, bq_project, bq_table):
"""Generates a html report for the specified date & project.
Args:
project: the project to generate report for. Check out bazel_bench.py.
date: the date to generate report for.
storage_bucket: the Storage bucket to fetch data from/upload the report to.
report_name: the name of the report on GS.
upload_report: whether to upload the report to GCS.
bq_project: the BigQuery project.
bq_table: the BigQuery table.
"""
  dated_subdir = _get_dated_subdir_for_project(project, date)
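  # The BigQuery cutoff is the day *after* the report date, so that the
  # historical query below (which presumably treats the cutoff as an exclusive
  # upper bound) still includes rows recorded on the report date itself.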
  bq_date_cutoff = (date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
  root_storage_url = _get_storage_url(storage_bucket, dated_subdir)
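  # The METADATA file is a JSON blob expected to carry the keys used below:
  # "all_commits", "benchmarked_commits", "platforms", "project_source" and
  # "command".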
metadata_file_url = "{}/METADATA".format(root_storage_url)
metadata = _load_json_from_remote_file(metadata_file_url)
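
  # A collapsible section listing all commits, with the benchmarked ones
  # marked out from the rest.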
  graph_components = []
  raw_files_components = []
  graph_components.append(_uncollapse_button("commits", "Show commits"))
  graph_components.append(
      _row_component(
          _col_component(
              "col-sm-10",
              _commits_component(metadata["all_commits"],
                                 metadata["benchmarked_commits"]))))
  for platform_measurement in sorted(
      metadata["platforms"], key=lambda k: k["platform"]):
    # Get the data.
    performance_data = _load_csv_from_remote_file("{}/{}".format(
        root_storage_url, platform_measurement["perf_data"]))
    aggr_json_profile = _load_csv_from_remote_file("{}/{}".format(
        root_storage_url, platform_measurement["aggr_json_profiles"]))
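    # The plain-text summary is stored next to the CSV, under the same name
    # with a .txt extension.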
    summary_text = _load_txt_from_remote_file("{}/{}".format(
        root_storage_url,
        platform_measurement["perf_data"].replace(".csv", ".txt")))
    wall_data, memory_data = _prepare_data_for_graph(performance_data,
                                                     aggr_json_profile)
    platform = platform_measurement["platform"]
    historical_wall_data, historical_mem_data = _prepare_time_series_data(
        _query_bq(bq_project, bq_table, metadata["project_source"],
                  bq_date_cutoff, platform))

    # Generate the graphs for that platform: the current run next to the
    # historical trend, for both wall time and memory.
    row_content = []
    row_content.append(
        _col_component(
            "col-sm-6",
            _single_graph(
                metric="wall",
                metric_label="Wall Time (s)",
                data=wall_data,
                platform=platform,
                median_series=len(EVENTS_ORDER))))
    row_content.append(
        _col_component(
            "col-sm-6",
            _historical_graph(
                metric="wall",
                metric_label="Wall Time (s)",
                data=historical_wall_data,
                platform=platform,
                color="#dd4477")))
    row_content.append(
        _col_component(
            "col-sm-6",
            _single_graph(
                metric="memory",
                metric_label="Memory (MB)",
                data=memory_data,
                platform=platform,
            )))
    row_content.append(
        _col_component(
            "col-sm-6",
            _historical_graph(
                metric="memory",
                metric_label="Memory (MB)",
                data=historical_mem_data,
                platform=platform,
                color="#3366cc")))
    row_content.append(
        _col_component(
            "col-sm-12",
            _uncollapse_button("summary-{}".format(platform),
                               "Show Summary Table")))
    row_content.append(
        _col_component("col-sm-12",
                       _summary_table(content=summary_text, platform=platform)))
    graph_components.append(
        _row_component(
            _col_component(
                "col-sm-5",
                '<h2 class="underlined">{}</h2><hr>'.format(platform))))
    raw_files_components.append(
        _uncollapse_button("raw_files_{}".format(platform),
                           "Show raw files for {}".format(platform)))
    raw_files_components.append(
        _row_component(
            _col_component(
                "col-sm-10",
                _get_file_list_component(storage_bucket, dated_subdir,
                                         platform))))
    graph_components.append(_row_component("\n".join(row_content)))
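
  # Assemble the final HTML page from the collected components.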
  content = _full_report(
      project,
      metadata["project_source"],
      date,
      command=metadata["command"],
      graph_components="\n".join(graph_components),
      raw_files_components="\n".join(raw_files_components))
  if not os.path.exists(REPORTS_DIRECTORY):
    os.makedirs(REPORTS_DIRECTORY)
  report_tmp_file = "{}/report_{}_{}.html".format(REPORTS_DIRECTORY, project,
                                                  date.strftime("%Y%m%d"))
  with open(report_tmp_file, "w") as fo:
    fo.write(content)
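
  # Either publish the report to GCS under the dated subdirectory, or dump
  # the HTML to stdout for local inspection.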
  if upload_report:
    _upload_to_storage(report_tmp_file, storage_bucket,
                       dated_subdir + "/{}.html".format(report_name))
  else:
    print(content)
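

# Example invocation (a sketch for illustration only; every literal below is
# hypothetical, and the real caller wires these arguments up elsewhere, e.g.
# from bazel_bench.py / this module's CLI flags):
#
#   _generate_report_for_date(
#       project="bazel",
#       date=datetime.date(2021, 1, 15),
#       storage_bucket="perf-data-bucket",  # hypothetical GCS bucket
#       report_name="perf_report",          # uploaded as <dated_subdir>/perf_report.html
#       upload_report=False,                # print the HTML instead of uploading
#       bq_project="my-gcp-project",        # hypothetical BigQuery project
#       bq_table="bazel_bench.perf",        # hypothetical BigQuery table
#   )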