Using ASIFT on Android for Feature Matching More Robust to Viewpoint Changes

This article describes how to use ASIFT on the Android platform for image matching, including how to convert a color image to grayscale, with complete code samples.

This article comes from http://blog.youkuaiyun.com/hellogv/ ; if you quote it, please credit the source!

Tonight is Christmas Eve, and like many other bespectacled tech geeks, I am spending it with my computer...

Last time I explained how to convert a color image to grayscale on Android through the NDK; with that in place, the Windows Mobile version of the ASIFT example can now be ported to Android. Once again, my thanks go to Jean-Michel Morel and Guoshen Yu for their selfless contribution: respect knowledge, respect the open-source spirit.

First, some screenshots of the demo program:

The image on the left is the result with the recognition rate set to the lowest level; the one on the right is the result with a slightly low setting.

The code for this article can be downloaded here: http://www.pudn.com/downloads314/sourcecode/comm/android/detail1391871.html

The ASIFT NDK code (C++) here is largely the same as the DLL code in the Windows Mobile article, but there are a few differences:

1. JNI does not support passing by reference, so some values have to be returned from functions instead (an out-parameter alternative is sketched right after this snippet), for example:

/**
 * Get the image size after zooming in/out.
 */
JNIEXPORT jintArray JNICALL Java_com_testASIFT_LibASIFT_GetZoomSize(
        JNIEnv* env, jobject obj) {
    jint arrint[2];
    arrint[0] = IM_X;
    arrint[1] = IM_Y;
    jintArray result = env->NewIntArray(2);
    env->SetIntArrayRegion(result, 0, 2, arrint);
    return result;
}

/**
 * Return the size of the matched image: jintArray[0] is the width, jintArray[1] is the height.
 */
JNIEXPORT jintArray JNICALL Java_com_testASIFT_LibASIFT_GetMatchedImageSize(
        JNIEnv* env, jobject obj) {
    jint arrint[2];
    arrint[0] = wo;
    arrint[1] = ho;
    jintArray result = env->NewIntArray(2);
    env->SetIntArrayRegion(result, 0, 2, arrint);
    return result;
}
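If you would rather keep something closer to an out-parameter style, one workaround is to have the Java side allocate the int[] and let the native code fill it in with SetIntArrayRegion. Below is a minimal sketch of that alternative; the function name FillZoomSize is hypothetical and not part of the project:

#include <jni.h>

extern int IM_X, IM_Y;  // assumed to be the same globals read by GetZoomSize above

// Hypothetical alternative: the Java caller allocates "new int[2]" and passes it in,
// and the native side writes the width/height into it instead of returning a new array.
JNIEXPORT void JNICALL Java_com_testASIFT_LibASIFT_FillZoomSize(
        JNIEnv* env, jobject obj, jintArray out) {
    if (env->GetArrayLength(out) < 2)
        return;                                 // the caller must supply at least two slots
    jint arrint[2];
    arrint[0] = IM_X;
    arrint[1] = IM_Y;
    env->SetIntArrayRegion(out, 0, 2, arrint);  // copy the two values into the Java array
}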

2. ASIFT takes 8-bit grayscale images as input, so the color image must be converted to grayscale before use:

void PixelToVector(jint *cbuf, int w, int h, std::vector<float> *ipixels) {
    for (int i = 0; i < h; i++) {
        for (int j = 0; j < w; j++) {
            // Read the pixel's color
            int color = cbuf[w * i + j];
            int red   = ((color & 0x00FF0000) >> 16);
            int green = ((color & 0x0000FF00) >> 8);
            int blue  = color & 0x000000FF;
            color = (red + green + blue) / 3;
            ipixels->push_back(color); // store the grayscale value
        }
    }
}
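For context, here is a minimal sketch of how such a helper would typically be driven from a JNI entry point: the Java side passes the ARGB pixels obtained from Bitmap.getPixels as a jintArray, and the native side pins the array, converts it, and releases it again. The function name LoadImagePixels is hypothetical; the real project exposes initImage1 and Match2ImageForImg instead.

#include <jni.h>
#include <vector>

void PixelToVector(jint *cbuf, int w, int h, std::vector<float> *ipixels); // defined above

// Hypothetical JNI wrapper: receives the ARGB pixels of a Bitmap and converts them
// to the float grayscale vector that ASIFT expects.
JNIEXPORT void JNICALL Java_com_testASIFT_LibASIFT_LoadImagePixels(
        JNIEnv* env, jobject obj, jintArray buf, jint w, jint h) {
    jint *cbuf = env->GetIntArrayElements(buf, NULL);   // pin the Java int[] in native memory
    if (cbuf == NULL) return;

    std::vector<float> ipixels;
    ipixels.reserve(w * h);
    PixelToVector(cbuf, w, h, &ipixels);                // ARGB -> 8-bit grayscale values

    env->ReleaseIntArrayElements(buf, cbuf, JNI_ABORT); // release without copying back
    // ... hand "ipixels" to compute_asift_keypoints() here ...
}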

After matching, the 8-bit grayscale result has to be packed back into color values so that it can be displayed in an RGB_565 Bitmap via setPixels:

jintArray result = env->NewIntArray(wo * ho);
jint *cResult = env->GetIntArrayElements(result, NULL);
int alpha = 0xFF << 24;
for (int i = 0; i < ho; i++) {
    for (int j = 0; j < wo; j++) {
        // Read the gray value and replicate it into the R, G and B channels
        int color = (int) opixelsASIFT[wo * i + j];
        color = alpha | (color << 16) | (color << 8) | color;
        cResult[wo * i + j] = color;
    }
}
env->ReleaseIntArrayElements(result, cResult, 0);
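Note that what the loop above actually builds are 32-bit ARGB color ints, which is the format Bitmap.setPixels consumes; Android converts them when the target Bitmap is created with Config.RGB_565. For comparison, a true 16-bit RGB565 value uses 5 bits for red, 6 for green and 5 for blue. A minimal, hypothetical helper showing that 5-6-5 packing, purely for illustration:

#include <stdint.h>

// Hypothetical helper: pack an 8-bit gray value into a 16-bit RGB565 pixel
// (5 bits red, 6 bits green, 5 bits blue).
static inline uint16_t GrayToRGB565(uint8_t gray) {
    uint16_t r = gray >> 3;  // keep the top 5 bits
    uint16_t g = gray >> 2;  // keep the top 6 bits
    uint16_t b = gray >> 3;  // keep the top 5 bits
    return (uint16_t)((r << 11) | (g << 5) | b);
}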

The logic of the main class testASIFT.java is as follows:

public class testASIFT extends Activity {
    /** Called when the activity is first created. */
    ImageView imgView;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        this.setTitle("Android上使用ASIFT---hellogv");
        imgView = (ImageView) this.findViewById(R.id.ImageView01);

        LibASIFT.initZoomSize(320, 480); // target size used for zooming
        int[] size = LibASIFT.GetZoomSize(); // check whether the setting took effect
        Log.e(String.valueOf(size[0]), String.valueOf(size[1]));

        Bitmap img1 = ((BitmapDrawable) getResources().getDrawable(R.drawable.adam1)).getBitmap();
        int w1 = img1.getWidth(), h1 = img1.getHeight();
        int[] pix1 = new int[w1 * h1];
        img1.getPixels(pix1, 0, w1, 0, 0, w1, h1);
        // Extract the keypoints of the first image
        LibASIFT.initImage1(pix1, w1, h1, 2);

        Bitmap img2 = ((BitmapDrawable) getResources().getDrawable(R.drawable.adam2)).getBitmap();
        int w2 = img2.getWidth(), h2 = img2.getHeight();
        int[] pix2 = new int[w2 * h2];
        img2.getPixels(pix2, 0, w2, 0, 0, w2, h2);

        int[] imgPixels = LibASIFT.Match2ImageForImg(pix2, w2, h2, 2); // match the two images
        int[] imgSize = LibASIFT.GetMatchedImageSize(); // size of the matched result image
        Bitmap imgResult = Bitmap.createBitmap(imgSize[0], imgSize[1], Config.RGB_565);
        imgResult.setPixels(imgPixels, 0, imgResult.getWidth(), 0, 0,
                imgResult.getWidth(), imgResult.getHeight());
        imgView.setImageBitmap(imgResult); // display the result
    }
}
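Judging from how the activity calls LibASIFT, the corresponding C++ entry points for initZoomSize, initImage1 and Match2ImageForImg presumably look roughly like the declarations below. This is a sketch inferred from the Java side, not the project's actual code: the parameter names, the return types, and the reading of the trailing int as the number of tilts are my assumptions.

#include <jni.h>

extern "C" {

// Inferred from LibASIFT.initZoomSize(320, 480): set the target size (IM_X/IM_Y) used for zooming.
JNIEXPORT void JNICALL Java_com_testASIFT_LibASIFT_initZoomSize(
        JNIEnv* env, jobject obj, jint width, jint height);

// Inferred from LibASIFT.initImage1(pix1, w1, h1, 2): load image 1 and compute its ASIFT keypoints.
JNIEXPORT void JNICALL Java_com_testASIFT_LibASIFT_initImage1(
        JNIEnv* env, jobject obj, jintArray pixels, jint w, jint h, jint numTilts);

// Inferred from LibASIFT.Match2ImageForImg(pix2, w2, h2, 2): match image 2 against image 1 and
// return the packed ARGB pixels of the side-by-side result image.
JNIEXPORT jintArray JNICALL Java_com_testASIFT_LibASIFT_Match2ImageForImg(
        JNIEnv* env, jobject obj, jintArray pixels, jint w, jint h, jint numTilts);

}  // extern "C"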

ASIFT + OpenCV image feature matching in practice: VC project source code

OpenCV header files included:

#include "cv.h"
#include "highgui.h"
#include "cxcore.h"

The core code is as follows:

if (!m_pImage1 || !m_pImage2) {
    AfxMessageBox("please,select 2 images!");
    return;
}
UpdateData(TRUE);

CvSize sz1 = cvSize(m_pImage1->width, m_pImage1->height);
CvSize sz2 = cvSize(m_pImage2->width, m_pImage2->height);
CvScalar s;
IplImage *gimg1 = cvCreateImage(sz1, IPL_DEPTH_8U, 1);
cvCvtColor(m_pImage1, gimg1, CV_BGR2GRAY);
IplImage *gimg2 = cvCreateImage(sz2, IPL_DEPTH_8U, 1);
cvCvtColor(m_pImage2, gimg2, CV_BGR2GRAY);

size_t w1, h1;
w1 = gimg1->width;
h1 = gimg1->height;
float *iarr1 = new float[w1 * h1];
for (int i = 0; i < h1; i++) {
    for (int j = 0; j < w1; j++) {
        s = cvGet2D(gimg1, i, j);
        iarr1[i * w1 + j] = s.val[0];
    }
}
vector<float> ipixels1(iarr1, iarr1 + w1 * h1);
delete[] iarr1;

size_t w2, h2;
w2 = gimg2->width;
h2 = gimg2->height;
float *iarr2 = new float[w2 * h2];
for (int i = 0; i < h2; i++) {
    for (int j = 0; j < w2; j++) {
        s = cvGet2D(gimg2, i, j);
        iarr2[i * w2 + j] = s.val[0];
    }
}
vector<float> ipixels2(iarr2, iarr2 + w2 * h2);
delete[] iarr2;

float wS = IM_X;
float hS = IM_Y;
float zoom1 = 0, zoom2 = 0;
int wS1 = 0, hS1 = 0, wS2 = 0, hS2 = 0;
vector<float> ipixels1_zoom, ipixels2_zoom;

if (!m_bOrininal) {
    if (m_lWidth == 0 || m_lHeight == 0)
        return;
    wS = m_lWidth;
    hS = m_lHeight;

    float InitSigma_aa = 1.6;
    float fproj_p, fproj_bg;
    char fproj_i;
    float *fproj_x4, *fproj_y4;
    int fproj_o;

    fproj_o = 3;
    fproj_p = 0;
    fproj_i = 0;
    fproj_bg = 0;
    fproj_x4 = 0;
    fproj_y4 = 0;

    float areaS = wS * hS;

    // Resize image 1
    float area1 = w1 * h1;
    zoom1 = sqrt(area1 / areaS);
    wS1 = (int) (w1 / zoom1);
    hS1 = (int) (h1 / zoom1);

    int fproj_sx = wS1;
    int fproj_sy = hS1;
    float fproj_x1 = 0;
    float fproj_y1 = 0;
    float fproj_x2 = wS1;
    float fproj_y2 = 0;
    float fproj_x3 = 0;
    float fproj_y3 = hS1;

    /* Anti-aliasing filtering along vertical direction */
    if (zoom1 > 1) {
        float sigma_aa = InitSigma_aa * zoom1 / 2;
        GaussianBlur1D(ipixels1, w1, h1, sigma_aa, 1);
        GaussianBlur1D(ipixels1, w1, h1, sigma_aa, 0);
    }

    // simulate a tilt: subsample the image along the vertical axis by a factor of t.
    ipixels1_zoom.resize(wS1 * hS1);
    fproj(ipixels1, ipixels1_zoom, w1, h1, &fproj_sx, &fproj_sy, &fproj_bg, &fproj_o, &fproj_p,
          &fproj_i, fproj_x1, fproj_y1, fproj_x2, fproj_y2, fproj_x3, fproj_y3, fproj_x4, fproj_y4);

    // Resize image 2
    float area2 = w2 * h2;
    zoom2 = sqrt(area2 / areaS);
    wS2 = (int) (w2 / zoom2);
    hS2 = (int) (h2 / zoom2);

    fproj_sx = wS2;
    fproj_sy = hS2;
    fproj_x2 = wS2;
    fproj_y3 = hS2;

    /* Anti-aliasing filtering along vertical direction */
    if (zoom2 > 1) {
        float sigma_aa = InitSigma_aa * zoom2 / 2;
        GaussianBlur1D(ipixels2, w2, h2, sigma_aa, 1);
        GaussianBlur1D(ipixels2, w2, h2, sigma_aa, 0);
    }

    // simulate a tilt: subsample the image along the vertical axis by a factor of t.
    ipixels2_zoom.resize(wS2 * hS2);
    fproj(ipixels2, ipixels2_zoom, w2, h2, &fproj_sx, &fproj_sy, &fproj_bg, &fproj_o, &fproj_p,
          &fproj_i, fproj_x1, fproj_y1, fproj_x2, fproj_y2, fproj_x3, fproj_y3, fproj_x4, fproj_y4);
} else {
    ipixels1_zoom.resize(w1 * h1);
    ipixels1_zoom = ipixels1;
    wS1 = w1;
    hS1 = h1;
    zoom1 = 1;

    ipixels2_zoom.resize(w2 * h2);
    ipixels2_zoom = ipixels2;
    wS2 = w2;
    hS2 = h2;
    zoom2 = 1;
}

int num_of_tilts1 = m_lTilts1;
int num_of_tilts2 = m_lTilts2;
int verb = 0;

// Define the SIFT parameters
siftPar siftparameters;
default_sift_parameters(siftparameters);

vector< vector< keypointslist > > keys1;
vector< vector< keypointslist > > keys2;
int num_keys1 = 0, num_keys2 = 0;

SetWindowText("Computing keypoints on the two images...");
CString str1, str2;
time_t tstart, tend1, tend2;
tstart = time(0);
DWORD dstart = GetTickCount();

num_keys1 = compute_asift_keypoints(ipixels1_zoom, wS1, hS1, num_of_tilts1, verb, keys1, siftparameters);
tend1 = time(0);
m_lKeyNum1 = num_keys1;
UpdateData(FALSE);
str1.Format("Img1 Keypoints computation accomplished in %f s", difftime(tend1, tstart));
SetWindowText(str1);

num_keys2 = compute_asift_keypoints(ipixels2_zoom, wS2, hS2, num_of_tilts2, verb, keys2, siftparameters);
tend2 = time(0);
m_lKeyNum2 = num_keys2;
UpdateData(FALSE);
str2.Format("Img2 Keypoints computation accomplished in %f s ,Matching the keypoints...", difftime(tend2, tstart));
SetWindowText(str2);

//// Match ASIFT keypoints
int num_matchings;
matchingslist matchings;
tstart = time(0);
num_matchings = compute_asift_matches(num_of_tilts1, num_of_tilts2, wS1, hS1, wS2, hS2,
                                      verb, keys1, keys2, matchings, siftparameters);
tend1 = time(0);
DWORD dSpan = GetTickCount() - dstart;
cout << "Keypoints matching accomplished in " << difftime(tend1, tstart) << " seconds." << endl;
str2.Format("Keypoints matching accomplished in %f s", difftime(tend1, tstart));
SetWindowText(str2);
m_lMatches = num_matchings;
UpdateData(FALSE);

str1.Format("Total time used:%d ms", dSpan);
AfxMessageBox(str1);

cvRelease((void**)&gimg1);
cvRelease((void**)&gimg2);

Reference: http://www.ipol.im/pub/art/2011/my-asift/
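The code above stops once num_matchings has been computed; to actually see the correspondences you still have to draw them. Below is a minimal sketch of one way to do that with the same OpenCV 1.x C API, assuming that matchings is a list of keypoint pairs whose first.x/first.y lie in image 1 and second.x/second.y in image 2 (as in the IPOL ASIFT demo), and that gimg1, gimg2, zoom1 and zoom2 from the code above are still in scope.

// Sketch: place the two grayscale images side by side and draw a line per ASIFT match.
CvSize szOut = cvSize(gimg1->width + gimg2->width,
                      gimg1->height > gimg2->height ? gimg1->height : gimg2->height);
IplImage *canvas = cvCreateImage(szOut, IPL_DEPTH_8U, 3);
cvZero(canvas);

// Copy image 1 into the left half and image 2 into the right half.
cvSetImageROI(canvas, cvRect(0, 0, gimg1->width, gimg1->height));
cvCvtColor(gimg1, canvas, CV_GRAY2BGR);
cvSetImageROI(canvas, cvRect(gimg1->width, 0, gimg2->width, gimg2->height));
cvCvtColor(gimg2, canvas, CV_GRAY2BGR);
cvResetImageROI(canvas);

// One green line per match; the keypoints were computed on the zoomed images,
// so scale the coordinates back to the original resolution.
for (matchingslist::iterator it = matchings.begin(); it != matchings.end(); ++it) {
    CvPoint p1 = cvPoint((int)(zoom1 * it->first.x), (int)(zoom1 * it->first.y));
    CvPoint p2 = cvPoint((int)(zoom2 * it->second.x) + gimg1->width, (int)(zoom2 * it->second.y));
    cvLine(canvas, p1, p2, CV_RGB(0, 255, 0), 1, CV_AA, 0);
}

cvNamedWindow("ASIFT matches", CV_WINDOW_AUTOSIZE);
cvShowImage("ASIFT matches", canvas);   // or cvSaveImage("matches.png", canvas)
cvWaitKey(0);
cvReleaseImage(&canvas);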