in deepracer_follow_the_leader_ws/webserver_pkg/webserver_pkg/models.py [0:0]
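For self-containment, a sketch of the imports this handler depends on. The exact module paths are assumptions inferred from the names used in the excerpt (notably the service type ConsoleModelActionSrv and the call_service_sync helper):

import os
import shutil

from flask import jsonify, request
from werkzeug.utils import secure_filename

# Assumed module paths; only the bare names appear in the excerpt.
from deepracer_interfaces_pkg.srv import ConsoleModelActionSrv
from webserver_pkg import constants
from webserver_pkg import webserver_publisher_node
from webserver_pkg.utility import call_service_sync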
def model_file_upload():
    """API to call the service to upload models to the artifacts directory.

    Returns:
        dict: Execution status if the API call was successful and the response message.
    """
    webserver_node = webserver_publisher_node.get_webserver_node()
    #
    # Check if the file received is a tar.gz
    #
    file_obj = request.files["file"]
    file_name = file_obj.filename
    secured_file = secure_filename(file_name)
    if secured_file.endswith(".tar.gz"):
        # Derive the folder name from the sanitized filename so that a crafted
        # filename (e.g. one containing "../") cannot escape the model directory.
        folder_name = secured_file[:-7]
    else:
        return jsonify({"success": False,
                        "message": "Failed to upload the model. Not a .tar.gz file"})
    # Always create a new directory: if the folder already exists,
    # delete it and create a fresh one.
    dir_path = os.path.join(constants.MODEL_DIRECTORY_PATH, folder_name)
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    os.makedirs(dir_path)
    # Save the uploaded file in the artifacts directory.
    webserver_node.get_logger().info("Uploaded model file: {}".format(file_name))
    file_obj.save(os.path.join(dir_path, secured_file))
    # Once the file is uploaded, optimize the model by converting the
    # .tar.gz archive into an optimized inference model.
    upload_model_req = ConsoleModelActionSrv.Request()
    upload_model_req.model_path = dir_path
    # action=1 uploads the model; action=0 deletes it.
    upload_model_req.action = 1
    upload_model_res = call_service_sync(webserver_node.model_action_cli, upload_model_req)
    if upload_model_res:
        webserver_node.get_logger().info(f"Upload model service returned status: {upload_model_res.status}")
        if upload_model_res.status == "done-upload":
            return jsonify({"success": True,
                            "message": "Model uploaded successfully to your vehicle"})
    return jsonify({"success": False,
                    "message": "Failed to upload and optimize the model"})