Overview:
OpenCV has its own stereo calibration functions, but the results can sometimes drift, so I wanted to try the MATLAB calibration toolbox first. The workflow is: use MATLAB to calibrate each of the two cameras individually, then run the stereo calibration, and save the results in xml files. XML is a file format that OpenCV can read directly; see the relevant documentation for a more detailed explanation.
1. Tool preparation:
The MATLAB calibration toolbox.
I think this step deserves separate emphasis. My MATLAB version is 2014a; the bundled calibration app only supports single-camera calibration. From http://www.vision.caltech.edu/bouguetj/calib_doc/#examples I later learned that an additional stereo calibration toolbox should be downloaded. Download link
Extract it into the MATLAB working directory and add it to the MATLAB search path. Running calib_gui then opens the main window of the MATLAB calibration toolbox:
2. Calibration procedure
1. Click Image names; all files under the current MATLAB path are listed, as in the screenshot above. Enter L as the basename so that all images whose names start with L are used for calibration, then enter b as the suffix to indicate that the images are in bmp format.
2. Click Extract grid corners to detect the corners. This step is the most tedious: keep pressing Enter and each image pops up in turn; with the mouse, pick the four outermost corners in clockwise or counter-clockwise order. After the first image you are asked for the size of the checkerboard squares; mine are 15 mm in both width and height.
Repeat the selection of the four outermost corners for every image starting with L. When this is done, click Calibration to run the calibration and Save to store the result as Calib_Results.mat; rename Calib_Results.mat to Calib_Results_left.mat so that it can be used later in the stereo calibration.
The right camera is handled in the same way (an optional corner-detection cross-check in OpenCV is sketched below).
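As an optional cross-check, independent of the MATLAB workflow, OpenCV can also be asked to find the inner corners on one of the calibration images. This is a minimal sketch using the same legacy C API as the code later in this post; the file name L1.bmp and the 9x6 inner-corner count are assumptions that must be adjusted to the actual board:
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <cstdio>

int main()
{
    // Load one of the left calibration images in grayscale (file name assumed)
    IplImage *img = cvLoadImage("L1.bmp", CV_LOAD_IMAGE_GRAYSCALE);
    if (!img) return 1;
    CvSize pattern_size = cvSize(9, 6);    // inner corners per row / column (assumed)
    CvPoint2D32f corners[9 * 6];
    int corner_count = 0;
    int found = cvFindChessboardCorners(img, pattern_size, corners, &corner_count);
    printf("pattern found: %d, corners detected: %d\n", found, corner_count);
    cvReleaseImage(&img);
    return 0;
}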
3. Stereo calibration
In the MATLAB console, run stereo_gui to open the stereo calibration toolbox. Click Load left and right calibration files, then click stereo calibration to run the calibration. The results are shown below:
3. Saving the results as xml files
Distortion_Camera_Left.xml
<?xml version="1.0"?>
<opencv_storage>
<Distortion type_id="opencv-matrix">
<rows>5</rows>
<cols>1</cols>
<dt>d</dt>
<data>
-0.15761 4.24565 -0.00236 0.02423 0.00000
</data>
</Distortion>
</opencv_storage>
Distortion_Camera_Right.xml
<?xml version="1.0"?>
<opencv_storage>
<Distortion type_id="opencv-matrix">
<rows>5</rows>
<cols>1</cols>
<dt>d</dt>
<data>
-0.09281 1.60263 0.00408 -0.00825 0.00000
</data>
</Distortion>
</opencv_storage>
Intrinsics_Camera_Left.xml
<?xml version="1.0"?>
<opencv_storage>
<Intrinsics_Camera_Left type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>d</dt>
<data>
2752.23366 0. 732.90263
0. 2786.18468 529.17431
0. 0. 1.
</data>
</Intrinsics_Camera_Left>
</opencv_storage>
Intrinsics_Camera_Right.xml
<?xml version="1.0"?>
<opencv_storage>
<Intrinsics_Camera_Right type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>d</dt>
<data>
2667.95661 0. 584.91164
0. 2699.92379 539.37941
0. 0. 1.
</data>
</Intrinsics_Camera_Right>
</opencv_storage>
RotRodrigues.xml
<?xml version="1.0"?>
<opencv_storage>
<RotRodrigues type_id="opencv-matrix">
<rows>3</rows>
<cols>1</cols>
<dt>d</dt>
<data>
0.02812 0.54890 0.00618
</data>
</RotRodrigues>
</opencv_storage>
Translation.xml
<?xml version="1.0"?>
<opencv_storage>
<Translation type_id="opencv-matrix">
<rows>3</rows>
<cols>1</cols>
<dt>d</dt>
<data>
-160.89829 -1.36460 25.91248
</data>
</Translation>
</opencv_storage>
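All of the files above use OpenCV's opencv-matrix xml layout. They can be written by hand, but it is less error-prone to let OpenCV write them. Below is a minimal sketch (my own addition) using the legacy C API, with the rotation vector from the RotRodrigues.xml listing above as the example; the other matrices are handled in the same way:
#include <opencv/cxcore.h>

int main()
{
    // Rotation vector (om) reported by stereo_gui, copied from the listing above
    double om[3] = { 0.02812, 0.54890, 0.00618 };
    CvMat RotRodrigues = cvMat(3, 1, CV_64F, om);   // wrap the raw data in a CvMat header
    // The third argument is the node name inside the xml file;
    // it is what cvLoad("RotRodrigues.xml") will return later.
    cvSave("RotRodrigues.xml", &RotRodrigues, "RotRodrigues");
    return 0;
}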
4. Reading the xml files in OpenCV and performing stereo rectification
// Legacy C-API OpenCV headers (the whole snippet uses the old CvMat / IplImage interface)
#include <opencv/cv.h>
#include <opencv/highgui.h>

// Load the intrinsic and distortion parameters and the extrinsics exported from MATLAB
CvMat *Intrinsics_Camera_Left  = (CvMat *)cvLoad("Intrinsics_Camera_Left.xml");
CvMat *Intrinsics_Camera_Right = (CvMat *)cvLoad("Intrinsics_Camera_Right.xml");
CvMat *Distortion_Camera_Left  = (CvMat *)cvLoad("Distortion_Camera_Left.xml");
CvMat *Distortion_Camera_Right = (CvMat *)cvLoad("Distortion_Camera_Right.xml");
CvMat *Translation_matlab  = (CvMat *)cvLoad("Translation.xml");
CvMat *RotRodrigues_matlab = (CvMat *)cvLoad("RotRodrigues.xml");
// MATLAB reports the inter-camera rotation as a Rodrigues vector (om);
// convert it to the 3x3 rotation matrix that cvStereoRectify expects
CvMat *R_opencv = cvCreateMat(3, 3, CV_64F);
cvRodrigues2(RotRodrigues_matlab, R_opencv);
// Allocate the rectification maps (an x map and a y map per camera, image size 1280x960)
IplImage *Left_Mapx  = cvCreateImage(cvSize(1280, 960), IPL_DEPTH_32F, 1);
IplImage *Left_Mapy  = cvCreateImage(cvSize(1280, 960), IPL_DEPTH_32F, 1);
IplImage *Right_Mapx = cvCreateImage(cvSize(1280, 960), IPL_DEPTH_32F, 1);
IplImage *Right_Mapy = cvCreateImage(cvSize(1280, 960), IPL_DEPTH_32F, 1);
// Rectification rotations (Rl, Rr) and new 3x4 projection matrices (Pl, Pr)
CvMat *Rl = cvCreateMat(3, 3, CV_64F);
CvMat *Rr = cvCreateMat(3, 3, CV_64F);
CvMat *Pl = cvCreateMat(3, 4, CV_64F);
CvMat *Pr = cvCreateMat(3, 4, CV_64F);
// Stereo rectification; the Q matrix is not needed here (0), and alpha = 0
// scales the rectified images so that the invalid (black) border regions are cropped away
cvStereoRectify(Intrinsics_Camera_Left, Intrinsics_Camera_Right,
    Distortion_Camera_Left, Distortion_Camera_Right,
    cvSize(1280, 960), R_opencv, Translation_matlab,
    Rl, Rr, Pl, Pr, 0, 2048, 0);
// Build the undistortion + rectification maps and remap both input images.
// IplimgLeft / IplimgRight are the original frames and img_left_Change /
// img_right_Change the rectified outputs (allocated elsewhere, same size and type).
cvInitUndistortRectifyMap(Intrinsics_Camera_Left, Distortion_Camera_Left, Rl, Pl,
    Left_Mapx, Left_Mapy);
cvInitUndistortRectifyMap(Intrinsics_Camera_Right, Distortion_Camera_Right, Rr, Pr,
    Right_Mapx, Right_Mapy);
cvRemap(IplimgLeft, img_left_Change, Left_Mapx, Left_Mapy);
cvRemap(IplimgRight, img_right_Change, Right_Mapx, Right_Mapy);
// Draw horizontal lines every 96 pixels on both rectified images so that the
// row alignment (epipolar lines) can be checked visually
for (int i = 1; i <= 9; i++)
{
    cvLine(img_left_Change,  cvPoint(0, 96 * i), cvPoint(1280 - 1, 96 * i), cvScalar(255, 0, 0));
    cvLine(img_right_Change, cvPoint(0, 96 * i), cvPoint(1280 - 1, 96 * i), cvScalar(255, 0, 0));
}
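To check the rectification visually, the two remapped images can simply be displayed side by side: after a correct rectification, corresponding points lie on the same image row, which is exactly what the horizontal lines drawn above make easy to verify. A minimal sketch, continuing with the variables from the snippet (window names are arbitrary):
// Show both rectified images; rows should now line up between left and right
cvNamedWindow("rectified left");
cvNamedWindow("rectified right");
cvShowImage("rectified left", img_left_Change);
cvShowImage("rectified right", img_right_Change);
cvWaitKey(0);
If the disparity-to-depth reprojection matrix is needed later (for example for cvReprojectImageTo3D), a 4x4 CvMat can be passed as the Q argument of cvStereoRectify instead of 0.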
5. Results
For any questions, email gao2014#nwsuaf.edu.cn
Source code: