in benchmarking/frameworks/framework_base.py [0:0]
def runBenchmark(self, info, benchmark, platform):
    """Execute one benchmark (containing exactly one test) on *platform*.

    Orchestrates the full run life cycle: string-map substitution, copying
    program/model/input files to the target, model- and test-level preprocess
    commands, the main benchmark command, optional power-metric collection,
    cleanup of files on the device, postprocess commands, and finally
    conversion/upload of any declared output files.

    Args:
        info: harness-level metadata dict ("programs", optional "shared_libs"
            and "string_map").
        benchmark: benchmark spec dict with "model" and a single-entry
            "tests" list (asserted below).
        platform: target platform object used to copy/run/delete files.

    Returns:
        Tuple ``(output, output_files)``: the merged metrics dict and the
        dict of output files moved back from the platform (or ``None`` when
        the test declares no "output_files").
    """
    model = benchmark["model"]
    tests = benchmark["tests"]
    assert len(tests) == 1, (
        "At this point, only one test should "
        + "exist in one benchmark. However, benchmark "
        + "{} doesn't.".format(benchmark["name"])
    )
    test = tests[0]
    index = test["INDEX"] if "INDEX" in test else 0
    first_iteration = index == 0
    # Without a "repeat" field each run is both the first and last iteration;
    # first/last gate one-time setup (file copies) and teardown (file deletes).
    last_iteration = ("repeat" not in model) or (
        "repeat" in model and index == model["repeat"] - 1
    )
    if self.host_platform is None:
        self.host_platform = getHostPlatform(self.tempdir, self.args)
    program_files = {
        name: info["programs"][name]["location"] for name in info["programs"]
    }
    program_path = (
        os.path.dirname(program_files["program"])
        if "program" in program_files
        else None
    )
    stringmap_from_info = info["string_map"] if "string_map" in info else None
    self._replaceStringMap(benchmark, platform, program_path, stringmap_from_info)
    # better to be before target program files separation.
    # this way, in ios, the platform may not be copied to the target.
    platform.preprocess(programs=program_files, benchmark=benchmark)
    tgt_program_files, host_program_files = self._separatePrograms(
        program_files, test.get("commands")
    )
    # Program binaries are only physically copied on the first iteration.
    tgt_program_files = platform.copyFilesToPlatform(
        tgt_program_files, copy_files=first_iteration
    )
    programs = {}
    deepMerge(programs, host_program_files)
    deepMerge(programs, tgt_program_files)
    model_files = {
        name: model["files"][name]["location"] for name in model["files"]
    }
    # Model-level converter (validated here, passed through to _runCommands).
    if "converter" in model:
        converter = model["converter"]
        assert "name" in converter, "converter field must have a name"
        assert converter["name"] in self.converters, "Unknown converter {}".format(
            converter
        )
    else:
        converter = None
    output = {}
    # inject default parameters into test
    if "iter" not in test:
        test["iter"] = -1
    # overall (model-level) preprocess; runs on the host, once per repeat set
    if "preprocess" in model and first_iteration:
        commands = model["preprocess"]["commands"]
        self._runCommands(
            output,
            commands,
            self.host_platform,
            programs,
            model,
            None,
            model_files,
            None,
            None,
            None,
            None,
            -1,
            converter,
        )
    input_files = (
        {
            name: test["input_files"][name]["location"]
            for name in test["input_files"]
        }
        if "input_files" in test
        else None
    )
    test_files = (
        {name: test["files"][name]["location"] for name in test["files"]}
        if "files" in test
        else {}
    )
    # Let's handle preprocess command first,
    # since we will copy all files into host
    if "preprocess" in test:
        # simple thing first, let's assume preprocess is self contained
        # check the program to executable
        if (
            "files" in test["preprocess"]
            and "program" in test["preprocess"]["files"]
        ):
            host_program_path = test["preprocess"]["files"]["program"]["location"]
            os.chmod(host_program_path, 0o777)
        # will deprecate in the future
        if "files" in test["preprocess"]:
            preprocess_files = {
                name: test["preprocess"]["files"][name]["location"]
                for name in test["preprocess"]["files"]
            }
            deepMerge(test_files, preprocess_files)
        # Initialize first so a preprocess entry with neither "commands"
        # nor "command" cannot reuse a stale `commands` value from the
        # model-level preprocess above (or raise NameError); this matches
        # the postprocess handling further down.
        commands = None
        if "commands" in test["preprocess"]:
            commands = test["preprocess"]["commands"]
        elif "command" in test["preprocess"]:
            commands = [test["preprocess"]["command"]]
        self._runCommands(
            output,
            commands,
            self.host_platform,
            programs,
            model,
            test,
            model_files,
            input_files,
            None,
            None,
            test_files,
            -1,
            converter,
        )
    tgt_input_files = (
        platform.copyFilesToPlatform(input_files) if input_files else None
    )
    shared_libs = None
    if "shared_libs" in info:
        shared_libs = platform.copyFilesToPlatform(
            info["shared_libs"], copy_files=first_iteration
        )
    tgt_model_files = platform.copyFilesToPlatform(
        model_files, copy_files=first_iteration
    )
    tgt_result_files = None
    if "output_files" in test:
        tgt_result_files = {
            name: test["output_files"][name]["location"]
            for name in test["output_files"]
        }
    total_num = test["iter"]
    # test-level platform_args override model-level ones wholesale
    if "platform_args" in test:
        platform_args = test["platform_args"]
    elif "platform_args" in model:
        platform_args = model["platform_args"]
    else:
        platform_args = {}
    # timeout precedence: test overrides model
    if "timeout" in model:
        platform_args["timeout"] = model["timeout"]
    if "timeout" in test:
        platform_args["timeout"] = test["timeout"]
    program = programs["program"] if "program" in programs else ""
    if test["metric"] == "power":
        platform_args["power"] = True
        method = test.get("method")
        platform_args["method"] = method
        if method == "software":
            power_util = software_power.PowerUtil(
                platform, test.get("collection_time", 300)
            )
        else:
            # FIXME "Monsoon" was unimportable
            from utils.monsoon_power import collectPowerData
        # in power metric, the output is ignored
        total_num = 0
        platform.killProgram(program)
    if test.get("env", False):
        platform_args["env"] = test["env"]
    if platform.getType() == "host":
        # Fix the number of threads
        if not platform_args.get("env", False):
            platform_args["env"] = {}
        MKL_NUM_THREADS = test.get("MKL_NUM_THREADS", 1)
        OMP_NUM_THREADS = test.get("OMP_NUM_THREADS", 1)
        if MKL_NUM_THREADS > 0:
            platform_args["env"]["MKL_NUM_THREADS"] = MKL_NUM_THREADS
        if OMP_NUM_THREADS > 0:
            platform_args["env"]["OMP_NUM_THREADS"] = OMP_NUM_THREADS
        # Randomly select one cpu core from logic cpu #4 to #13.
        cpu_core = test.get("cpu-list", random.randint(5, 14))
        if isinstance(test["commands"], list) and cpu_core > 0:
            # pin the main command to one core for reproducible timings
            test["commands"][-1] = " ".join(
                ["taskset", "--cpu-list", str(cpu_core), test["commands"][-1]]
            )
    self._runCommands(
        output,
        test["commands"],
        platform,
        programs,
        model,
        test,
        tgt_model_files,
        tgt_input_files,
        tgt_result_files,
        shared_libs,
        test_files,
        total_num,
        converter,
        platform_args=platform_args,
        main_command=True,
    )
    if test["metric"] == "power":
        if test.get("method") == "software":
            output = power_util.collect()
        else:
            collection_time = (
                test["collection_time"] if "collection_time" in test else 180
            )
            voltage = float(test["voltage"]) if "voltage" in test else 4.0
            output = collectPowerData(
                platform.platform_hash,
                collection_time,
                voltage,
                test["iter"],
                self.args.monsoon_map,
            )
        platform.waitForDevice(20)
        # kill the process if exists
        platform.killProgram(program)
    # remove the files before copying out the output files
    # this will save some time in ios platform, since in ios
    # all files are copied back to the host system
    if len(output) > 0:
        if input_files is not None:
            platform.delFilesFromPlatform(tgt_input_files)
        if last_iteration:
            platform.delFilesFromPlatform(tgt_model_files)
            platform.delFilesFromPlatform(tgt_program_files)
            if shared_libs is not None:
                platform.delFilesFromPlatform(shared_libs)
    output_files = None
    if "output_files" in test:
        target_dir = os.path.join(self.tempdir, "output")
        shutil.rmtree(target_dir, True)
        os.makedirs(target_dir)
        output_files = platform.moveFilesFromPlatform(tgt_result_files, target_dir)
    platform.postprocess()
    if "postprocess" in test:
        # Fixed copy-paste bug: this previously checked
        # test["preprocess"]["files"] while reading the program location
        # from test["postprocess"]["files"], which could KeyError when no
        # preprocess section exists and skipped chmod when it should run.
        if (
            "files" in test["postprocess"]
            and "program" in test["postprocess"]["files"]
        ):
            host_program_path = test["postprocess"]["files"]["program"]["location"]
            os.chmod(host_program_path, 0o777)
        # will deprecate in the future
        if "files" in test["postprocess"]:
            postprocess_files = {
                name: test["postprocess"]["files"][name]["location"]
                for name in test["postprocess"]["files"]
            }
            deepMerge(test_files, postprocess_files)
        commands = None
        if "commands" in test["postprocess"]:
            commands = test["postprocess"]["commands"]
        elif "command" in test["postprocess"]:
            commands = [test["postprocess"]["command"]]
        self._runCommands(
            output,
            commands,
            self.host_platform,
            programs,
            model,
            test,
            model_files,
            input_files,
            output_files,
            None,
            test_files,
            -1,
            converter,
        )
    # model-level postprocess runs on the host, once after the last repeat
    if "postprocess" in model and last_iteration:
        commands = model["postprocess"]["commands"]
        self._runCommands(
            output,
            commands,
            self.host_platform,
            programs,
            model,
            test,
            model_files,
            None,
            None,
            None,
            None,
            -1,
            converter,
        )
    # after everything is done, some of the output files may
    # contain metrics that can be processed. Those files have
    # field converter, and specify which convert to use to
    # convert the metrics
    if output_files:
        to_upload = {}
        for filename in output_files:
            file = output_files[filename]
            output_file_spec = test["output_files"][filename]
            # if files should be uploaded, upload and add location to meta data.
            if output_file_spec.get("upload", False):
                to_upload.update({filename: file})
            # if output_file can be converted for data, convert and merge output.
            # (renamed from `converter` to avoid shadowing the model-level
            # converter bound earlier in this function)
            file_converter = output_file_spec.get("converter")
            if not file_converter:
                continue
            assert "name" in file_converter, "converter field must have a name"
            assert (
                file_converter["name"] in self.converters
            ), "Unknown converter {}".format(file_converter["name"])
            converter_class = self.converters[file_converter["name"]]
            args = file_converter.get("args")
            with open(file, "r") as f:
                content = f.read()
            convert = converter_class()
            results, _ = convert.collect(content, args)
            one_output = convert.convert(results)
            deepMerge(output, one_output)
        if to_upload:
            output_file_uploader = FileUploader("output_files").get_uploader()
            output_file_meta = {}
            for filename, file in to_upload.items():
                try:
                    getLogger().info(f"Uploading (unknown) ({file}) to manifold")
                    url = output_file_uploader.upload_file(file)
                    output_file_meta.update({filename: url})
                    getLogger().info(f"{file} uploaded to {url}")
                except Exception:
                    getLogger().exception(
                        f"Could not upload output file {file}. Skipping."
                    )
            if output_file_meta:
                # setdefault guards against a run whose commands never
                # populated output["meta"] (previously a KeyError here).
                output.setdefault("meta", {}).update(
                    {"output_files": output_file_meta}
                )
    return output, output_files