in mozetl/bhr_collection/bhr_collection.py [0:0]
def ingest_processed_profile(self, profile):
    """Merge one processed BHR profile into this collection.

    For every thread in ``profile["threads"]`` this makes two passes over
    the per-date hang samples: a first pass that feeds *every* sample to
    ``pre_ingest_row`` (accounting/weighting pass), and a second pass that
    feeds a randomly down-sampled subset (controlled by the
    ``post_sample_size`` config value) to ``ingest_row``. Finally it folds
    the profile's per-date usage hours into ``self.usage_hours_by_date``.

    Args:
        profile: A processed-profile dict with at least ``"threads"`` and
            optionally ``"usageHoursByDate"`` keys.
    """

    def build_row(thread, samples, date, index):
        # Reconstruct the symbolicated stack and assemble the row tuple
        # shared by pre_ingest_row and ingest_row.
        # NOTE(review): `index` iterates a per-date sampleHangCount list
        # but indexes the thread-wide sample table — looks intentional
        # upstream, but worth confirming the tables are parallel.
        stack = reconstruct_stack(
            thread["stringArray"],
            thread["funcTable"],
            thread["stackTable"],
            thread["libs"],
            samples["stack"][index],
        )
        return (
            stack,
            thread["stringArray"][samples["runnable"][index]],
            thread["name"],
            date["date"],
            samples["annotations"][index],
            thread["stringArray"][samples["platform"][index]],
            date["sampleHangMs"][index],
            date["sampleHangCount"][index],
        )

    # Reset each existing thread's prune-stack cache, seeded with a
    # synthetic "(root)" node at index 0.
    for existing_thread in self.thread_table.get_items():
        prune_stack_cache = UniqueKeyedTable(lambda key: [0.0])
        prune_stack_cache.key_to_index(("(root)", None, None))
        existing_thread["pruneStackCache"] = prune_stack_cache

    sample_size = self.config["post_sample_size"]
    for other in profile["threads"]:
        other_samples = other["sampleTable"]
        other_dates = other["dates"]

        # Pass 1: every sample goes through pre-ingest accounting.
        for date in other_dates:
            for i in range(len(date["sampleHangCount"])):
                self.pre_ingest_row(build_row(other, other_samples, date, i))

        # Pass 2: ingest a down-sampled subset. Decide inclusion *before*
        # reconstructing the stack — reconstruction is the expensive part,
        # and skipping it for rejected samples does not change the random
        # draw sequence or any ingested row.
        for date in other_dates:
            for i in range(len(date["sampleHangCount"])):
                if sample_size == 1.0 or random.random() <= sample_size:
                    self.ingest_row(build_row(other, other_samples, date, i))

    self.usage_hours_by_date = merge_number_dicts(
        self.usage_hours_by_date, profile.get("usageHoursByDate", {})
    )