in report/generate_master_report.py
def _generate_report_for_date(date, storage_bucket, report_name, upload_report,
                              bq_project, bq_table):
    """Generates an HTML master report for the specified date.

    Args:
      date: the date to generate the report for.
      storage_bucket: the Storage bucket to fetch data from / upload the report to.
      report_name: the name of the report on GCS.
      upload_report: whether to upload the report to GCS.
      bq_project: the BigQuery project.
      bq_table: the BigQuery table.
    """
    bq_date_cutoff = (date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")

    graph_components = []
    projects = set()
    for platform in PLATFORMS:
        historical_wall_data, historical_mem_data, platform_projects = _prepare_time_series_data(
            _query_bq(bq_project, bq_table, bq_date_cutoff, platform))
        projects = projects.union(set(platform_projects))

        # Generate wall time and memory graphs for this platform.
        row_content = []
        row_content.append(
            _col_component(
                "col-sm-6",
                _historical_graph(
                    metric="wall",
                    metric_label="Wall Time (s)",
                    data=historical_wall_data,
                    platform=platform,
                )))
        row_content.append(
            _col_component(
                "col-sm-6",
                _historical_graph(
                    metric="memory",
                    metric_label="Memory (MB)",
                    data=historical_mem_data,
                    platform=platform,
                )))
        graph_components.append(_row_component("\n".join(row_content)))
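
    # Assemble the final HTML: one row of graphs per platform plus the
    # per-project report sections produced by _project_reports_components.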
    content = _full_report(
        date,
        graph_components="\n".join(graph_components),
        project_reports_components=_project_reports_components(date, projects))

    if not os.path.exists(REPORTS_DIRECTORY):
        os.makedirs(REPORTS_DIRECTORY)

    report_tmp_file = "{}/report_master_{}.html".format(REPORTS_DIRECTORY,
                                                        date.strftime("%Y%m%d"))
    with open(report_tmp_file, "w") as fo:
        fo.write(content)
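
    # Upload the report to <storage_bucket>/all/YYYY/MM/DD/<report_name>.html,
    # or print it to stdout when uploading is disabled.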
    if upload_report:
        _upload_to_storage(
            report_tmp_file, storage_bucket,
            "all/{}/{}.html".format(date.strftime("%Y/%m/%d"), report_name))
    else:
        print(content)
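

# Illustrative sketch: how this helper might be driven to backfill reports over
# a date range. The backfill loop, its name, and its parameters are assumptions
# for the example; only _generate_report_for_date and datetime come from the
# module above.
def _example_backfill(start_date, end_date, storage_bucket, bq_project, bq_table):
    """Generates and uploads one master report per day in [start_date, end_date]."""
    current = start_date
    while current <= end_date:
        _generate_report_for_date(
            date=current,
            storage_bucket=storage_bucket,
            report_name="report_master_{}".format(current.strftime("%Y%m%d")),
            upload_report=True,
            bq_project=bq_project,
            bq_table=bq_table,
        )
        current += datetime.timedelta(days=1)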