in source/calibration/FeatureMatcher.cpp [208:316]
Overlap findMatches(
const Image& img0,
const std::vector<Keypoint>& corners0,
const Camera& camera0,
const Image& img1,
const std::vector<Keypoint>& corners1,
const Camera& camera1) {
boost::timer::cpu_timer timer;
boost::timer::cpu_timer znccTimer;
znccTimer.stop();
boost::timer::cpu_timer projectCornerTimer;
projectCornerTimer.stop();
Image image1; // optimization: avoid reallocation by keeping this outside the loop
// For each corner in corners0, compute its best and second-best match in corners1, and vice versa
std::vector<BestMatch> bestMatches0(corners0.size());
std::vector<BestMatch> bestMatches1(corners1.size());
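// Tracking the best and second-best score in both directions lets the final pass keep only
// mutual, unambiguous matches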
int callsToZncc = 0;
int callsToProjectCorners = 0;
for (ssize_t index0 = 0; index0 < ssize(corners0); index0++) {
LOG_IF(INFO, (FLAGS_threads == 0 || FLAGS_threads == 1) && (index0 % 1000) == 0)
<< "Processing feature " << index0 << " of " << corners0.size() << " from pair "
<< camera0.id << " " << camera1.id;
const Keypoint& corner0 = corners0[index0];
int depthSample = -1;
double disparity = 0;
cv::Rect2f box1(0, 0, 0, 0);
bool firstProjection = true;
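// Sweep candidate depth samples for corner0; each call updates the disparity and the
// search box (box1) in camera1's image for the current depth hypothesis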
while (getNextDepthSample(depthSample, disparity, box1, camera0, corner0.coords, camera1)) {
// only remap the corner for sufficiently large disparities (i.e. sufficiently close depth samples)
if (firstProjection || disparity > 1 / FLAGS_max_depth_for_remap) {
// compute what the area around corner 0 would look like from camera 1
projectCornerTimer.resume();
callsToProjectCorners++;
// stop the timer before checking the result so a failed projection doesn't leave it running
const bool projected = projectCorner(image1, camera1, img0, camera0, corner0, 1 / disparity);
projectCornerTimer.stop();
if (!projected) {
continue;
}
// don't match if we can't rediscover the corner after it has been reprojected
if (!hasCornerNearCenter(image1)) {
continue;
}
firstProjection = false;
}
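// Wrap the reprojected patch as a Keypoint so it can be scored against candidates in corners1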
Keypoint projection1(image1);
// look for a corner in corners1 that falls inside the search box and looks similar
znccTimer.resume();
for (ssize_t index1 = 0; index1 < ssize(corners1); ++index1) {
cv::Point2f cvCoords(corners1[index1].coords.x(), corners1[index1].coords.y());
if (!box1.contains(cvCoords)) {
continue;
}
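// ZNCC (zero-mean normalized cross-correlation) similarity between the reprojected patch
// and the candidate corner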
double score = computeZncc(projection1, corners1[index1]);
bestMatches0[index0].updateCornerScore(score, index1);
bestMatches1[index1].updateCornerScore(score, index0);
callsToZncc++;
}
znccTimer.stop();
}
}
// Take match if both ends are strong and each other's best match
Overlap overlap(camera0.id, camera1.id);
for (const BestMatch& bestMatch0 : bestMatches0) {
if (bestMatch0.isWeakCorner()) {
continue;
}
const BestMatch& bestMatch1 = bestMatches1[bestMatch0.bestIdx];
if (bestMatch1.isWeakCorner()) {
continue;
}
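// Keep the pair only if the match is mutual: corner1's best match must point back to this
// corner0 entry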
if (&bestMatch0 != &bestMatches0[bestMatch1.bestIdx]) {
continue;
}
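// Record the mutual match as (score, index into corners0, index into corners1)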
overlap.matches.emplace_back(bestMatch0.bestScore, bestMatch1.bestIdx, bestMatch0.bestIdx);
}
// Only report timing in single-threaded mode
// In multithreaded mode these clocks would include time from other threads
// running simultaneously
if (FLAGS_enable_timing && FLAGS_threads == 1) {
LOG(INFO) << folly::sformat(
"{} and {} matching complete. Overlap fraction: {}. Matches: {}. Timing: {} "
"Calls to ZNCC: {}. ZNCC Time: {} "
"Calls to ProjectCorners: {}. Project Corner Time: {} ",
camera0.id,
camera1.id,
camera0.overlap(camera1),
overlap.matches.size(),
timer.format(),
callsToZncc,
znccTimer.format(),
callsToProjectCorners,
projectCornerTimer.format());
} else {
LOG(INFO) << folly::sformat(
"{} and {} matching complete. Overlap fraction: {}. Matches: {}",
camera0.id,
camera1.id,
camera0.overlap(camera1),
overlap.matches.size());
}
return overlap;
}