in inference_pkg/src/intel_inference_eng.cpp [62:96]
/// Load a multi-head OpenVINO IR model and prepare an inference request.
///
/// @param artifactPath  Path to the model's ".xml" IR file.
/// @param device        Target device name passed to Core::LoadNetwork (e.g. "CPU").
/// @param core          Inference Engine core used to read and load the network.
/// @param inputNamesArr Out: filled with every input name matching a known head tag.
/// @param outputName    Out: set to the name of the network's first output.
/// @param inputPrec     Precision applied to each recognized input.
/// @param outputPrec    Precision applied to the first output.
/// @param inferenceNode ROS2 node used only for logging.
/// @return An InferRequest created from the loaded executable network.
/// @throws InferenceExcept if the path has no extension or it is not "xml".
InferenceEngine::InferRequest setMultiHeadModel(std::string artifactPath, const std::string &device,
                                                InferenceEngine::Core core, std::vector<std::string> &inputNamesArr,
                                                std::string &outputName, const InferenceEngine::Precision &inputPrec,
                                                const InferenceEngine::Precision &outputPrec,
                                                std::shared_ptr<rclcpp::Node> inferenceNode) {
    RCLCPP_INFO(inferenceNode->get_logger(), "******* In setMultiHeadModel *******");
    // Only OpenVINO IR ".xml" artifacts are accepted.
    const auto dotPos = artifactPath.find_last_of('.');
    if (dotPos == std::string::npos) {
        throw InferenceExcept("Artifact missing file extension");
    }
    if (artifactPath.compare(dotPos + 1, std::string::npos, "xml") != 0) {
        throw InferenceExcept("No xml extension found");
    }
    auto network = core.ReadNetwork(artifactPath);
    // Record every input whose name contains a known head tag, and force
    // the requested precision on it; unrecognized inputs are left alone.
    for (const auto& inputEntry : network.getInputsInfo()) {
        const std::string &inputName = inputEntry.first;
        const bool isKnownHead = inputName.rfind(OBS) != std::string::npos
                              || inputName.rfind(LIDAR) != std::string::npos
                              || inputName.rfind(FRONT) != std::string::npos
                              || inputName.rfind(STEREO) != std::string::npos
                              || inputName.rfind(LEFT) != std::string::npos;
        if (isKnownHead) {
            inputNamesArr.push_back(inputName);
            inputEntry.second->setPrecision(inputPrec);
        }
    }
    // Only the first output head is used; report its name to the caller.
    auto outputsInfo = network.getOutputsInfo();
    const auto firstOutput = outputsInfo.begin();
    outputName = firstOutput->first;
    firstOutput->second->setPrecision(outputPrec);
    auto executableNetwork = core.LoadNetwork(network, device);
    return executableNetwork.CreateInferRequest();
}