in inference_pkg/src/intel_inference_eng.cpp [247:308]
void RLInferenceModel::sensorCB(const deepracer_interfaces_pkg::msg::EvoSensorMsg::SharedPtr msg) {
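    // Skip processing while inference is disabled.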
    if(!doInference_) {
        return;
    }
    try {
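        // Populate each network input blob from the corresponding field of the sensor message.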
        for(size_t i = 0; i < inputNamesArr_.size(); ++i) {
            auto inputPtr = inferRequest_.GetBlob(inputNamesArr_[i])->buffer().as<InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type *>();
            // Holds the post-processed data that is sent to the inference engine.
            cv::Mat retData;
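            // The input head name selects which part of the sensor message fills this blob.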
            if (inputNamesArr_[i].find(STEREO) != std::string::npos) {
                loadStereoImg<cv::Vec2b, float>(inputPtr, retData, imgProcess_, msg->images, paramsArr_[i]);
            }
            else if (inputNamesArr_[i].find(FRONT) != std::string::npos
                     || inputNamesArr_[i].find(LEFT) != std::string::npos
                     || inputNamesArr_[i].find(OBS) != std::string::npos) {
                load1DImg<uchar, float>(inputPtr, retData, imgProcess_, msg->images.front(), paramsArr_[i]);
            }
            else if (inputNamesArr_[i].find(LIDAR) != std::string::npos) {
                loadLidarData(inputPtr, msg->lidar_data);
            }
            else {
                RCLCPP_ERROR(inferenceNode->get_logger(), "Invalid input head");
                return;
            }
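            // Reset the image processing state before the next input is prepared.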
            imgProcess_->reset();
        }
        // Do inference
        inferRequest_.Infer();
        auto output = inferRequest_.GetBlob(outputName_);
        // Package the results and publish to all subscribers.
        auto outputDims = output->getTensorDesc().getDims();
        auto outputData = output->buffer().as<InferenceEngine::PrecisionTrait<InferenceEngine::Precision::FP32>::value_type*>();
        auto inferMsg = deepracer_interfaces_pkg::msg::InferResultsArray();
        for (size_t i = 0; i < msg->images.size(); ++i) {
            // Send the image data over with the results.
            inferMsg.images.push_back(msg->images[i]);
        }
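        // outputDims[1] is the number of class labels in the output tensor; publish one result per label.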
        for (size_t label = 0; label < outputDims[1]; ++label) {
            auto inferData = deepracer_interfaces_pkg::msg::InferResults();
            inferData.class_label = label;
            inferData.class_prob = outputData[label];
            // Set bounding box data to -1 to indicate to subscribers that this model
            // offers no localization information.
            inferData.x_min = -1.0;
            inferData.y_min = -1.0;
            inferData.x_max = -1.0;
            inferData.y_max = -1.0;
            inferMsg.results.push_back(inferData);
        }
        // Send results to all subscribers.
        resultPub_->publish(inferMsg);
    }
    catch (const std::exception &ex) {
        RCLCPP_ERROR(inferenceNode->get_logger(), "Inference failed %s", ex.what());
    }
}