in model_optimizer_pkg/model_optimizer_pkg/model_optimizer_node.py [0:0]
def optimize_tf_model(self,
                      model_name,
                      model_metadata_sensors,
                      training_algorithm,
                      input_width,
                      input_height,
                      lidar_channels,
                      aux_inputs=None):
    """Helper function to run Intel's model optimizer for DeepRacer tensorflow model.

    Args:
        model_name (str): Model prefix, should be the same in the weight and symbol file.
        model_metadata_sensors (list): List of sensor input types(int) for all the sensors
                                       with which the model was trained.
        training_algorithm (int): Training algorithm key(int) for the algorithm with which
                                  the model was trained.
        input_width (int): Width of the input image to the inference engine.
        input_height (int): Height of the input image to the inference engine.
        lidar_channels (int): Number of LiDAR values with which the LiDAR head of
                              the model was trained.
        aux_inputs (dict, optional): Dictionary of auxiliary options for the model optimizer.
                                     Defaults to None, treated as an empty dict.

    Raises:
        Exception: Custom exception if the input height or width is less than 1.

    Returns:
        tuple: Tuple whose first value is the error code and second value
               is a string to the location of the converted model if any.
    """
    # Avoid the shared-mutable-default pitfall: bind a fresh dict per call
    # instead of a module-lifetime `{}` default that callers could mutate.
    if aux_inputs is None:
        aux_inputs = {}
    if input_width < 1 or input_height < 1:
        raise Exception("Invalid height or width")
    # Convert the API information into Intel model optimizer cli commands.
    common_params = self.convert_to_mo_cli(model_name,
                                           model_metadata_sensors,
                                           training_algorithm,
                                           input_width,
                                           input_height,
                                           lidar_channels,
                                           aux_inputs)
    # TensorFlow specific parameters (mo_tf.py CLI flag -> default value).
    # BUGFIX: "--tensorflow_operation_patterns" previously had a single leading
    # dash, inconsistent with every other double-dash flag here and with the
    # Intel Model Optimizer CLI, so aux_inputs keyed on the real flag name
    # would never match it.
    tf_params = {"--input_model_is_text": "",
                 "--offload_unsupported_operations_to_tf": "",
                 "--tensorflow_subgraph_patterns": "",
                 "--tensorflow_operation_patterns": "",
                 "--tensorflow_custom_operations_config_update": "",
                 "--tensorflow_use_custom_operations_config": ""}
    # Add the correct file suffix: text-format graphs are .pbtxt, frozen
    # binary graphs are .pb.
    common_params[constants.MOKeys.MODEL_PATH] += ".pbtxt" if "--input_model_is_text" in aux_inputs else ".pb"
    return self.run_optimizer("mo_tf.py", common_params,
                              self.set_platform_param(tf_params, aux_inputs))