Both KAZE and AKAZE are algorithms available through OpenCV's built-in API; AKAZE is the accelerated version of KAZE (the "A" stands for Accelerated) and runs faster than KAZE.
Unlike SURF and SIFT, which are built on a linear (Gaussian) scale space, KAZE (and AKAZE) builds a nonlinear scale space via nonlinear diffusion. For image processing this is a real advantage over SURF and SIFT, since images are largely discontinuous and nonlinear diffusion preserves those edges instead of blurring them.
Basic steps and overview of KAZE (AKAZE):
1. Build the scale space with AOS (Additive Operator Splitting); this is a nonlinear construction, whereas SURF and SIFT use a linear (Gaussian) one. AKAZE accelerates this step by using Fast Explicit Diffusion (FED) instead of AOS; see the creation sketch after this list for how the scale-space settings appear as API parameters.
2. Detect feature points with the Hessian matrix (extrema of the Hessian determinant across scales).
3. Assign an orientation from first-order image derivatives; this is what provides rotation invariance.
4. Generate the descriptor, followed by normalization.
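To make the mapping from these steps to the OpenCV API concrete, here is a minimal creation sketch. The parameter values shown are simply the library defaults (not values used or tuned in this article); both factory functions expose the detection threshold, the number of octaves/layers, and the diffusivity function used for the nonlinear scale space.

#include <opencv2/features2d.hpp>
using namespace cv;

// KAZE: extended/upright flags, detection threshold, octaves, octave layers, diffusivity
Ptr<KAZE> kazeDet = KAZE::create(false, false, 0.001f, 4, 4, KAZE::DIFF_PM_G2);
// AKAZE: binary MLDB descriptor by default, plus the same threshold/octave/diffusivity knobs
Ptr<AKAZE> akazeDet = AKAZE::create(AKAZE::DESCRIPTOR_MLDB, 0, 3, 0.001f, 4, 4, KAZE::DIFF_PM_G2);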
Comparison with SIFT and SURF:
1. More stable;
2. Nonlinear scale space;
3. AKAZE is faster than KAZE.
For a detailed analysis of the algorithms and their underlying theory, see the links at the end.
//31 AKAZE
void StartOp2::ImageProcess2_31()
{
Mat src = imread("../../Images/22.jpg", IMREAD_GRAYSCALE);
if (!src.data) {
printf("could not load image...\n");
return;
}
imshow("image1", src);
// akaze detection
//Ptr<KAZE> detector = KAZE::create();
Ptr<AKAZE> detector = AKAZE::create();
vector<KeyPoint> keypoints;
double t1 = getTickCount();
detector->detect(src, keypoints, Mat());
double t2 = getTickCount();
double tkaze = 1000 * (t2 - t1) / getTickFrequency();
printf("KAZE Time consume(ms) : %f", tkaze);
Mat keypointImg;
drawKeypoints(src, keypoints, keypointImg, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
imshow("kaze key points", keypointImg);
//AKAZE match
Mat img1 = imread("../../Images/22.jpg", IMREAD_GRAYSCALE);
Mat img2 = imread("../../Images/23.jpg", IMREAD_GRAYSCALE);
if (img1.empty() || img2.empty()) {
printf("could not load images...\n");
return;
}
//imshow("box image", img1);
//imshow("scene image", img2);
// extract akaze features
Ptr<AKAZE> detector1 = AKAZE::create();
vector<KeyPoint> keypoints_obj;
vector<KeyPoint> keypoints_scene;
Mat descriptor_obj, descriptor_scene;
//double t1 = getTickCount();
detector1->detectAndCompute(img1, Mat(), keypoints_obj, descriptor_obj);
detector1->detectAndCompute(img2, Mat(), keypoints_scene, descriptor_scene);
//double t2 = getTickCount();
//double tkaze = 1000 * (t2 - t1) / getTickFrequency();
//printf("AKAZE Time consume(ms) : %f\n", tkaze);
// matching
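// Note: the default AKAZE descriptor (MLDB) is binary (CV_8U), so the FLANN matcher
// must be created with LshIndexParams (a Hamming-style index). The plain
// FlannBasedMatcher commented out below defaults to a KD-tree index, which
// expects float (CV_32F) descriptors and would fail on these binary ones.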
FlannBasedMatcher matcher(new flann::LshIndexParams(20, 10, 2));
//FlannBasedMatcher matcher;
vector<DMatch> matches;
matcher.match(descriptor_obj, descriptor_scene, matches);
// draw matches(key points)
Mat akazeMatchesImg;
drawMatches(img1, keypoints_obj, img2, keypoints_scene, matches, akazeMatchesImg);
imshow("akaze match result", akazeMatchesImg);
//draw good matches
vector<DMatch> goodMatches;
double minDist = 100000, maxDist = 0;
for (int i = 0; i < descriptor_obj.rows; i++) {
double dist = matches[i].distance;
if (dist < minDist) {
minDist = dist;
}
if (dist > maxDist) {
maxDist = dist;
}
}
printf("min distance : %f", minDist);
for (int i = 0; i < descriptor_obj.rows; i++) {
double dist = matches[i].distance;
if (dist < max( 1.5*minDist, 0.02)) {
goodMatches.push_back(matches[i]);
}
}
drawMatches(img1, keypoints_obj, img2, keypoints_scene, goodMatches, akazeMatchesImg, Scalar::all(-1),
Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
imshow("good match result", akazeMatchesImg);
}
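Because the default AKAZE descriptor is binary, a brute-force matcher with Hamming distance is another common choice instead of the FLANN-LSH matcher used above. Below is a minimal sketch under that assumption; it reuses the descriptor_obj / descriptor_scene matrices from the function above and is not part of the original code.

// Alternative matching for binary AKAZE descriptors: brute-force + Hamming distance.
BFMatcher bfMatcher(NORM_HAMMING);
vector<DMatch> bfMatches;
bfMatcher.match(descriptor_obj, descriptor_scene, bfMatches);

// Or replace the min-distance heuristic with Lowe's ratio test via knnMatch:
vector<vector<DMatch>> knn;
bfMatcher.knnMatch(descriptor_obj, descriptor_scene, knn, 2);
vector<DMatch> ratioGood;
for (size_t i = 0; i < knn.size(); i++) {
if (knn[i].size() == 2 && knn[i][0].distance < 0.8f * knn[i][1].distance)
ratioGood.push_back(knn[i][0]);
}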
References:
https://blog.youkuaiyun.com/chenyusiyuan/article/details/8710462
http://www.pianshen.com/article/9621127314/