in inference_pkg/src/inference_node.cpp [111:135]
/// @brief Handler for the LoadModelSrv service: instantiates the inference
///        task requested by the client and loads the model artifact into it.
/// @param request_header ROS middleware request header (unused).
/// @param req Request carrying the task type, pre-processing type, and the
///            filesystem path of the model artifact to load.
/// @param res Response; error is set to 0 on success and 1 on any failure.
void LoadModelHdl(const std::shared_ptr<rmw_request_id_t> request_header,
                  std::shared_ptr<deepracer_interfaces_pkg::srv::LoadModelSrv::Request> req,
                  std::shared_ptr<deepracer_interfaces_pkg::srv::LoadModelSrv::Response> res) {
    (void)request_header;
    auto itInferTask = taskList_.find(req->task_type);
    auto itPreProcess = preProcessList_.find(req->pre_process_type);
    // Default to failure; cleared only after the model has been loaded.
    res->error = 1;
    if (itInferTask == taskList_.end() || itPreProcess == preProcessList_.end()) {
        // Fix: this failure used to be silent (the function just fell through
        // with error=1). Log it like the other error branches so clients can
        // diagnose an unsupported task or pre-processing type.
        RCLCPP_ERROR(this->get_logger(), "Invalid task type or pre-process type in load model request");
        return;
    }
    switch (req->task_type) {
        case rlTask:
            // Replace any previously held engine with a fresh RL inference model
            // subscribed to the sensor fusion topic.
            itInferTask->second.reset(new IntelInferenceEngine::RLInferenceModel(this->shared_from_this(), "/sensor_fusion_pkg/sensor_msg"));
            break;
        case objDetectTask:
            //! TODO add object detection when class is implemented.
            RCLCPP_ERROR(this->get_logger(), "Object detection not implemented");
            return;
        default:
            RCLCPP_ERROR(this->get_logger(), "Unknown inference task");
            return;
    }
    // NOTE(review): loadModel's outcome is not checked here; error is cleared
    // unconditionally — confirm loadModel reports failures (e.g. bad artifact
    // path) via exceptions or its own logging.
    itInferTask->second->loadModel(req->artifact_path.c_str(), itPreProcess->second);
    res->error = 0;
}