// Reposted from (please credit the source): http://blog.youkuaiyun.com/wanggsx918/article/details/23773729
// (Original post note: "the result screenshots are shown below.")
#import "UIImage+OpenCV.h"
#import "MyViewController.h"
#import <opencv2/highgui/ios.h>
#import <opencv2/imgproc/imgproc_c.h>
#import <opencv2/core/core_c.h>
#import <opencv2/features2d/features2d.hpp>
#import <opencv2/nonfree/features2d.hpp>
// Aperture value to use for the Canny edge detection.
// NOTE(review): not referenced anywhere in this chunk — presumably used by
// code outside this view (or left over from the original sample); confirm
// before removing.
const int kCannyAperture = 7;
// Private interface (class extension) for MyViewController.
@interface MyViewController ()
// Per-frame processing hook; its implementation is not visible in this
// chunk — TODO confirm it exists elsewhere in the file.
- (void)processFrame;
@end
@implementation MyViewController

@synthesize imageView = _imageView;
@synthesize imageView1 = _imageView1;

#pragma mark - Lifecycle

/// Loads a source image and a template image, runs skin detection over both,
/// then locates the template inside the source with cvMatchTemplate (SQDIFF)
/// and draws the best-match rectangle onto the source, shown in imageView1.
- (void)viewDidLoad
{
    [super viewDidLoad];
    //[self TakeColorFromImageHSV];

    // --- Source image: convert and run skin detection ---
    UIImage *mImage = [UIImage imageNamed:@"防伪标签007.jpg"];
    IplImage *srcIpl = [self convertToIplImage:mImage];
    // Single-channel destination for the skin-detection mask.
    IplImage *dscIpl = cvCreateImage(cvGetSize(srcIpl), srcIpl->depth, 1);
    [self SkinDetect:srcIpl withParam:dscIpl];
    // NOTE(review): dscIplNew is produced but never displayed or read below.
    IplImage *dscIplNew = cvCreateImage(cvGetSize(srcIpl), IPL_DEPTH_8U, 3);
    cvCvtColor(dscIpl, dscIplNew, CV_GRAY2BGR);
    self.imageView.image = mImage;

    // --- Template image: same pipeline ---
    UIImage *mImage1 = [UIImage imageNamed:@"temple005.jpg"];
    self.imageView1.image = mImage1;
    IplImage *srcIpl1 = [self convertToIplImage:mImage1];
    IplImage *dscIpl1 = cvCreateImage(cvGetSize(srcIpl1), srcIpl1->depth, 1);
    [self SkinDetect:srcIpl1 withParam:dscIpl1];
    IplImage *dscIplNew1 = cvCreateImage(cvGetSize(srcIpl1), IPL_DEPTH_8U, 3);
    cvCvtColor(dscIpl1, dscIplNew1, CV_GRAY2BGR);

    // --- Template matching ---
    IplImage *src = srcIpl;
    IplImage *srcResult = srcIpl;   // the match rectangle is drawn onto this image
    IplImage *templat = srcIpl1;
    int srcW = src->width;
    int srcH = src->height;
    int templatW = templat->width;
    int templatH = templat->height;
    int resultW = srcW - templatW + 1;
    int resultH = srcH - templatH + 1;

    // Guard: cvMatchTemplate requires the template to fit inside the source;
    // otherwise the result image would have a non-positive size and
    // cvCreateImage would fail.
    if (resultW > 0 && resultH > 0) {
        // BUGFIX: was the magic number 32 — use the named depth constant
        // (cvMatchTemplate requires a single-channel 32-bit float result).
        IplImage *result = cvCreateImage(cvSize(resultW, resultH), IPL_DEPTH_32F, 1);
        cvMatchTemplate(src, templat, result, CV_TM_SQDIFF);

        double minValue, maxValue;
        CvPoint minLoc, maxLoc;
        cvMinMaxLoc(result, &minValue, &maxValue, &minLoc, &maxLoc);
        // For CV_TM_SQDIFF the best match is at the *minimum* location.
        cvRectangle(srcResult, minLoc,
                    cvPoint(minLoc.x + templatW, minLoc.y + templatH),
                    cvScalar(0, 0, 255));
        self.imageView1.image = [self convertToUIImage:srcResult];
        cvReleaseImage(&result);
    }

    // BUGFIX: the original leaked every IplImage created above.
    cvReleaseImage(&dscIpl);
    cvReleaseImage(&dscIplNew);
    cvReleaseImage(&dscIpl1);
    cvReleaseImage(&dscIplNew1);
    cvReleaseImage(&srcIpl1);
    cvReleaseImage(&srcIpl);   // also covers src/srcResult (aliases)
}

#pragma mark - Image conversion

/// Converts a UIImage to a 3-channel BGR IplImage.
/// The caller owns the returned image and must cvReleaseImage() it.
- (IplImage*)convertToIplImage:(UIImage*)image
{
    CGImageRef imageRef = image.CGImage;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Draw the UIImage into an RGBA bitmap backed by the IplImage buffer.
    IplImage *iplImage = cvCreateImage(cvSize(image.size.width, image.size.height), IPL_DEPTH_8U, 4);
    CGContextRef contextRef = CGBitmapContextCreate(iplImage->imageData,
                                                    iplImage->width,
                                                    iplImage->height,
                                                    iplImage->depth,      // 8 bits per component
                                                    iplImage->widthStep,
                                                    colorSpace,
                                                    kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, image.size.width, image.size.height), imageRef);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    IplImage *ret = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    // BUGFIX: the buffer is 4-channel RGBA, so the conversion code must be
    // CV_RGBA2BGR; CV_RGB2BGR expects a 3-channel source and asserts at runtime.
    cvCvtColor(iplImage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplImage);
    return ret;
}

/// Converts a 3-channel BGR IplImage to a UIImage.
/// Side effect: converts `image` to RGB in place.
- (UIImage*)convertToUIImage:(IplImage*)image
{
    cvCvtColor(image, image, CV_BGR2RGB);   // in-place channel swap for CG
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // NSData copies the pixel buffer, so the UIImage is safe to use after
    // the caller releases `image`.
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(image->width, image->height,
                                        image->depth,                      // bits per component
                                        image->depth * image->nChannels,   // bits per pixel
                                        image->widthStep,
                                        colorSpace,
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return ret;
}

#pragma mark - Teardown

- (void)viewDidUnload
{
    [super viewDidUnload];
    self.imageView = nil;
    self.imageView1 = nil;
    // _videoCapture is declared outside this chunk — presumably a C++
    // capture object; `delete` on a null pointer is a safe no-op.
    delete _videoCapture;
    _videoCapture = nil;
}

@end