Part of a binocular (stereo) vision project: SURF features are extracted from the images captured by the two cameras (the detector is constructed with a Hessian threshold of 400, and the 50 matches with the smallest descriptor distance are kept).
For the details, see Chapter 8 of OpenCV 2 Computer Vision Application Programming Cookbook, the sections "Detecting the scale-invariant SURF features" and "Describing SURF features".
The main body of the program is adapted from http://blog.youkuaiyun.com/lee_cv/article/details/8804578
#include <iostream>
#include <vector>
#include <algorithm>
#include <opencv2/core/core.hpp> //the OpenCV include path is set in the project properties, so these headers are found automatically
#include <opencv2/highgui/highgui.hpp> //imread / imshow / imwrite
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/nonfree/features2d.hpp> //SurfFeatureDetector / SurfDescriptorExtractor
#include <opencv2/legacy/legacy.hpp> //BruteForceMatcher
using namespace std;
int main()
{
const char LEFT_PATH[]= {"/home/liyuanzhe/two-view/code/opencv_test/left2.jpg"};
const char RIGHT_PATH[]= {"/home/liyuanzhe/two-view/code/opencv_test/right2.jpg"};
cv::Mat img_left = cv::imread(LEFT_PATH);
cv::Mat img_right = cv::imread(RIGHT_PATH);
cv::Mat img_left_trans = img_right; //note: Mat assignment is a shallow copy, so img_left_trans shares pixel data with img_right
if(!img_left.data || !img_right.data)//bail out if either image failed to load
{
cout<<"error: failed to load input images"<<endl;
return -1;
}
cout<<"images opened OK"<<endl;
cv::SurfFeatureDetector detector(400);
std::vector<cv::KeyPoint> keypoints_left,keypoints_right;//two vectors of keypoints, one for each image
detector.detect(img_left,keypoints_left); //detect SURF keypoints in img_left and store them in keypoints_left
detector.detect(img_right,keypoints_right);
cv::Mat img_keypoints_left; //output images for drawing the keypoints; left empty so drawKeypoints allocates fresh copies
cv::Mat img_keypoints_right;//(assigning img_left directly would only copy the Mat header, and drawing would also mark up the originals)
//draw keypoints_left onto img_keypoints_left
cv::drawKeypoints(img_left,keypoints_left,
img_keypoints_left,cv::Scalar::all(-1),
cv::DrawMatchesFlags::DEFAULT);
cv::drawKeypoints(img_right,keypoints_right,
img_keypoints_right,cv::Scalar::all(-1),
cv::DrawMatchesFlags::DEFAULT);
//compute the SURF descriptors for the detected keypoints
cv::SurfDescriptorExtractor extractor;//descriptor extractor object
cv::Mat descriptors_left,descriptors_right;//matrices holding the descriptors
extractor.compute(img_left,keypoints_left,descriptors_left);
extractor.compute(img_right,keypoints_right,descriptors_right);
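//each row of descriptors_left / descriptors_right is one SURF descriptor (64 floats by default, 128 with extended SURF)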
//match the descriptor vectors with a brute-force matcher
cv::BruteForceMatcher<cv::L2<float> > matcher;//brute-force matcher using L2 distance (legacy module)
vector<cv::DMatch>matches;
matcher.match(descriptors_left,descriptors_right,matches);
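//each cv::DMatch stores queryIdx (index into keypoints_left), trainIdx (index into keypoints_right) and the descriptor distance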
//keep the 50 matches with the lowest distance
if(matches.size()>50)
{
std::nth_element(matches.begin(),matches.begin()+49,matches.end());//partial sort: DMatch::operator< compares by distance
matches.erase(matches.begin()+50,matches.end());
}
//draw the matches as line segments
cv::Mat img_matches;
cv::drawMatches(img_left,keypoints_left,img_right,keypoints_right,matches,img_matches);//render the matched keypoint pairs side by side into img_matches
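//show the keypoint images, the match visualization and the (shared-header) right image, then save the match image to disk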
cv::namedWindow("img_left");
cv::namedWindow("img_right");
cv::namedWindow("img_match");
cv::namedWindow("img_left_trans");
cv::imshow("img_left",img_keypoints_left);
cv::imshow("img_right",img_keypoints_right);
cv::imshow("img_match",img_matches);
cv::imshow("img_left_trans",img_left_trans);
cv::imwrite("/home/liyuanzhe/two-view/code/opencv_test/matches.jpg",img_matches);
cv::waitKey(0);
return 0;
}
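For reference, if the legacy module is not available, the same brute-force matching step can also be done with cv::BFMatcher from features2d (present in OpenCV 2.4). This is only a minimal sketch of that alternative, not part of the original program; the function name matchBruteForce is made up for illustration:

#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

//minimal sketch: brute-force matching with the non-legacy cv::BFMatcher
std::vector<cv::DMatch> matchBruteForce(const cv::Mat& descriptors_left,
                                        const cv::Mat& descriptors_right)
{
    cv::BFMatcher matcher(cv::NORM_L2); //L2 norm, the same metric as cv::L2<float> above
    std::vector<cv::DMatch> matches;
    matcher.match(descriptors_left,descriptors_right,matches); //one best match per left descriptor
    return matches;
}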