in source/neo/eval.py [0:0]
def evaluate_inference_speed(self, repeat_times, neo_optimized=True):
"""
object detector's inference speed evaluation
:param repeat_times: repeating times for test
:param neo_optimized: whether to using sagemaker neo optimized model, boolean variable
:return: None
"""
image_data_for_raw_model, image_data_for_neo_optimized_model = self.get_model_input()
_, channels, height, width = image_data_for_raw_model.shape
if not neo_optimized:
# warmup: run untimed passes first so one-time initialization does not skew the measurement
for _ in range(100):
_ = self.eval_without_neo(x=image_data_for_raw_model)
# inference test
t_start = time.time()
for _ in range(repeat_times):
_ = self.eval_without_neo(x=image_data_for_raw_model)
t_end = time.time()
print('[NEO Optimization Disabled] Time Cost per Frame (input size = 1x{}x{}x{}) = {} ms'.format(
channels, height, width, 1000 * (t_end - t_start) / repeat_times))
else:
# warmup
for _ in range(100):
_ = self.eval_with_neo(x=image_data_for_neo_optimized_model)
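# inference test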
t_start = time.time()
for _ in range(repeat_times):
_ = self.eval_with_neo(x=image_data_for_neo_optimized_model)
t_end = time.time()
print('[NEO Optimization Enabled] Time Cost per Frame (input size = 1x{}x{}x{}) = {} ms'.format(
channels, height, width, 1000 * (t_end - t_start) / repeat_times))
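
# Usage sketch (illustrative, not part of the original file): assumes the
# enclosing evaluator class is constructed elsewhere with the model artifacts
# it needs; the class name `Evaluator` and its constructor arguments below are
# placeholders, not taken from this file.
#
#   evaluator = Evaluator(...)
#   evaluator.evaluate_inference_speed(repeat_times=1000, neo_optimized=False)
#   evaluator.evaluate_inference_speed(repeat_times=1000, neo_optimized=True)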