Surf

// Headers for OpenCV 2.4.x (cv::BruteForceMatcher lives in the legacy module there)
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/legacy/legacy.hpp>

// RobustMatcher class taken from the OpenCV 2 Computer Vision Application Programming Cookbook, Ch. 9
class RobustMatcher {
  private:
     // pointer to the feature point detector object
     cv::Ptr<cv::FeatureDetector> detector;
     // pointer to the feature descriptor extractor object
     cv::Ptr<cv::DescriptorExtractor> extractor;
     // pointer to the matcher object
     cv::Ptr<cv::DescriptorMatcher> matcher;
     float ratio;       // max ratio between 1st and 2nd NN
     bool refineF;      // if true, will refine the F matrix
     double distance;   // min distance to the epipolar line
     double confidence; // confidence level (probability)
  public:
     RobustMatcher() : ratio(0.65f), refineF(true),
                       confidence(0.99), distance(3.0) {
        // ORB is the default feature
        detector = new cv::OrbFeatureDetector();
        extractor = new cv::OrbDescriptorExtractor();
        matcher = new cv::BruteForceMatcher<cv::HammingLUT>;
     }
 
  // Set the feature detector
  void setFeatureDetector(cv::Ptr<cv::FeatureDetector>& detect) {
     detector = detect;
  }
  // Set the descriptor extractor
  void setDescriptorExtractor(cv::Ptr<cv::DescriptorExtractor>& desc) {
     extractor = desc;
  }
  // Set the matcher
  void setDescriptorMatcher(cv::Ptr<cv::DescriptorMatcher>& match) {
     matcher = match;
  }
  // Set the confidence level
  void setConfidenceLevel(double conf) {
     confidence = conf;
  }
  // Set the minimum distance to the epipolar line
  void setMinDistanceToEpipolar(double dist) {
     distance = dist;
  }
  // Set the NN ratio
  void setRatio(float rat) {
     ratio = rat;
  }
 
  // Clear matches for which the NN ratio is greater than the threshold;
  // return the number of removed points
  // (the corresponding entries are cleared, i.e. their size will be 0)
  int ratioTest(std::vector<std::vector<cv::DMatch> >& matches) {
    int removed = 0;
    // for all matches
    for (std::vector<std::vector<cv::DMatch> >::iterator
             matchIterator = matches.begin();
         matchIterator != matches.end(); ++matchIterator) {
       // if 2 NN have been identified
       if (matchIterator->size() > 1) {
          // check the distance ratio
          if ((*matchIterator)[0].distance /
              (*matchIterator)[1].distance > ratio) {
             matchIterator->clear(); // remove match
             removed++;
          }
       } else { // does not have 2 neighbours
          matchIterator->clear(); // remove match
          removed++;
       }
    }
    return removed;
  }
 
  // Insert symmetrical matches into the symMatches vector
  void symmetryTest(
      const std::vector<std::vector<cv::DMatch> >& matches1,
      const std::vector<std::vector<cv::DMatch> >& matches2,
      std::vector<cv::DMatch>& symMatches) {
    // for all matches image 1 -> image 2
    for (std::vector<std::vector<cv::DMatch> >::const_iterator
             matchIterator1 = matches1.begin();
         matchIterator1 != matches1.end(); ++matchIterator1) {
       // ignore deleted matches
       if (matchIterator1->size() < 2)
           continue;
       // for all matches image 2 -> image 1
       for (std::vector<std::vector<cv::DMatch> >::const_iterator
                matchIterator2 = matches2.begin();
            matchIterator2 != matches2.end(); ++matchIterator2) {
           // ignore deleted matches
           if (matchIterator2->size() < 2)
              continue;
           // match symmetry test
           if ((*matchIterator1)[0].queryIdx ==
               (*matchIterator2)[0].trainIdx &&
               (*matchIterator2)[0].queryIdx ==
               (*matchIterator1)[0].trainIdx) {
              // add the symmetrical match
              symMatches.push_back(
                 cv::DMatch((*matchIterator1)[0].queryIdx,
                            (*matchIterator1)[0].trainIdx,
                            (*matchIterator1)[0].distance));
              break; // next match in image 1 -> image 2
           }
       }
    }
  }
 
  // Identify good matches using RANSAC
  // Return the fundamental matrix
  cv::Mat ransacTest(
      const std::vector<cv::DMatch>& matches,
      const std::vector<cv::KeyPoint>& keypoints1,
      const std::vector<cv::KeyPoint>& keypoints2,
      std::vector<cv::DMatch>& outMatches) {
   // Convert keypoints into Point2f
   std::vector<cv::Point2f> points1, points2;
   cv::Mat fundemental;
   for (std::vector<cv::DMatch>::const_iterator it = matches.begin();
        it != matches.end(); ++it) {
       // Get the position of the left keypoints
       float x = keypoints1[it->queryIdx].pt.x;
       float y = keypoints1[it->queryIdx].pt.y;
       points1.push_back(cv::Point2f(x, y));
       // Get the position of the right keypoints
       x = keypoints2[it->trainIdx].pt.x;
       y = keypoints2[it->trainIdx].pt.y;
       points2.push_back(cv::Point2f(x, y));
    }
   // Compute the F matrix using RANSAC
   std::vector<uchar> inliers(points1.size(), 0);
   if (points1.size() > 0 && points2.size() > 0) {
      fundemental = cv::findFundamentalMat(
          cv::Mat(points1), cv::Mat(points2), // matching points
          inliers,       // match status (inlier or outlier)
          CV_FM_RANSAC,  // RANSAC method
          distance,      // distance to the epipolar line
          confidence);   // confidence probability
      // extract the surviving (inlier) matches
      std::vector<uchar>::const_iterator itIn = inliers.begin();
      std::vector<cv::DMatch>::const_iterator itM = matches.begin();
      // for all matches
      for ( ; itIn != inliers.end(); ++itIn, ++itM) {
         if (*itIn) { // it is a valid match
            outMatches.push_back(*itM);
         }
      }
      if (refineF) {
         // The F matrix will be recomputed with all accepted matches
         // Convert keypoints into Point2f for the final F computation
         points1.clear();
         points2.clear();
         for (std::vector<cv::DMatch>::const_iterator it = outMatches.begin();
              it != outMatches.end(); ++it) {
             // Get the position of the left keypoints
             float x = keypoints1[it->queryIdx].pt.x;
             float y = keypoints1[it->queryIdx].pt.y;
             points1.push_back(cv::Point2f(x, y));
             // Get the position of the right keypoints
             x = keypoints2[it->trainIdx].pt.x;
             y = keypoints2[it->trainIdx].pt.y;
             points2.push_back(cv::Point2f(x, y));
         }
         // Compute the 8-point F from all accepted matches
         if (points1.size() > 0 && points2.size() > 0) {
            fundemental = cv::findFundamentalMat(
               cv::Mat(points1), cv::Mat(points2), // matches
               CV_FM_8POINT); // 8-point method
         }
      }
   }
   return fundemental;
  }
 
  // Match feature points using the ratio test, symmetry test and RANSAC
  // Returns the fundamental matrix
  cv::Mat match(cv::Mat& image1,
                cv::Mat& image2, // input images
     // output matches and keypoints
     std::vector<cv::DMatch>& matches,
     std::vector<cv::KeyPoint>& keypoints1,
     std::vector<cv::KeyPoint>& keypoints2) {
   // 1a. Detection of the feature points (ORB by default)
   detector->detect(image1, keypoints1);
   detector->detect(image2, keypoints2);
   // 1b. Extraction of the feature descriptors
   cv::Mat descriptors1, descriptors2;
   extractor->compute(image1, keypoints1, descriptors1);
   extractor->compute(image2, keypoints2, descriptors2);
   // 2. Match the two image descriptors
   // Construction of the matcher
   //cv::BruteForceMatcher<cv::L2<float> > matcher;
   // from image 1 to image 2
   // based on k nearest neighbours (with k=2)
   std::vector<std::vector<cv::DMatch> > matches1;
   matcher->knnMatch(descriptors1, descriptors2,
       matches1, // vector of matches (up to 2 per entry)
       2);       // return 2 nearest neighbours
   // from image 2 to image 1
   // based on k nearest neighbours (with k=2)
   std::vector<std::vector<cv::DMatch> > matches2;
   matcher->knnMatch(descriptors2, descriptors1,
       matches2, // vector of matches (up to 2 per entry)
       2);       // return 2 nearest neighbours
   // 3. Remove matches for which the NN ratio is greater than the threshold
   // clean image 1 -> image 2 matches
   int removed = ratioTest(matches1);
   // clean image 2 -> image 1 matches
   removed = ratioTest(matches2);
   // 4. Remove non-symmetrical matches
   std::vector<cv::DMatch> symMatches;
   symmetryTest(matches1, matches2, symMatches);
   // 5. Validate the matches using RANSAC
   cv::Mat fundemental = ransacTest(symMatches,
               keypoints1, keypoints2, matches);
   // return the fundamental matrix
   return fundemental;
  }
};
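
The class defaults to ORB, but, as the title and the commented-out cv::BruteForceMatcher<cv::L2<float> > line above suggest, the same pipeline can also be driven by SURF. Below is a minimal sketch of that configuration, assuming OpenCV 2.4.x with the nonfree module providing cv::SurfFeatureDetector and cv::SurfDescriptorExtractor; the ORB-based example actually used in this post follows after it.

// Sketch only: configure the RobustMatcher for SURF instead of ORB
// (requires #include <opencv2/nonfree/features2d.hpp> and linking the nonfree module)
cv::Ptr<cv::FeatureDetector> surfDetector = new cv::SurfFeatureDetector(400.); // example Hessian threshold
cv::Ptr<cv::DescriptorExtractor> surfExtractor = new cv::SurfDescriptorExtractor();
cv::Ptr<cv::DescriptorMatcher> surfMatcher =
    new cv::BruteForceMatcher<cv::L2<float> >(); // L2 matcher for float descriptors

RobustMatcher surfRmatcher;
surfRmatcher.setFeatureDetector(surfDetector);
surfRmatcher.setDescriptorExtractor(surfExtractor);
surfRmatcher.setDescriptorMatcher(surfMatcher);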
 
 
// set parameters

int numKeyPoints = 1500;

// Instantiate the robust matcher

RobustMatcher rmatcher;

// instantiate detector, extractor, matcher

cv::Ptr<cv::FeatureDetector> detector = new cv::OrbFeatureDetector(numKeyPoints);
cv::Ptr<cv::DescriptorExtractor> extractor = new cv::OrbDescriptorExtractor;
cv::Ptr<cv::DescriptorMatcher> matcher = new cv::BruteForceMatcher<cv::HammingLUT>;

rmatcher.setFeatureDetector(detector);
rmatcher.setDescriptorExtractor(extractor);
rmatcher.setDescriptorMatcher(matcher);

// Load the input images and detect/match keypoints

cv::Mat img1;
std::vector<cv::KeyPoint> img1_keypoints;
cv::Mat img1_descriptors;
cv::Mat img2;
std::vector<cv::KeyPoint> img2_keypoints;
cv::Mat img2_descriptors;
std::vector<cv::DMatch> matches; // match() expects a flat vector of matches
img1 = cv::imread(fList[0].string(), CV_LOAD_IMAGE_GRAYSCALE);
img2 = cv::imread(fList[1].string(), CV_LOAD_IMAGE_GRAYSCALE);

rmatcher.match(img1, img2, matches, img1_keypoints, img2_keypoints);
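
To check the result visually, the matches that survive the ratio, symmetry, and RANSAC tests can be drawn side by side. A minimal sketch using cv::drawMatches (the output filename is just an example):

// Sketch only: visualize the filtered matches
cv::Mat matchImage;
cv::drawMatches(img1, img1_keypoints,  // first image and its keypoints
                img2, img2_keypoints,  // second image and its keypoints
                matches,               // matches returned by RobustMatcher::match
                matchImage);           // output image with match lines
cv::imwrite("matches.png", matchImage);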