Multimedia Programming: An iOS Camera Image Capture Utility Class

This article walks through using the AVFoundation framework on iOS to show a live camera preview and actively grab image data: creating the capture session, choosing a resolution, starting and stopping capture, and reading out the pixel data. It also shows how to turn the passive, callback-driven delivery of frames into an on-demand pull model to improve application performance.

Reposted from http://www.it165.net/pro/html/201408/19449.html

  • The utility class shows a live preview, handles rotation automatically, and grabs frames in an active (pull) manner, so the consumer's polling rate is not capped by the camera's delivery rate.

    The system APIs mostly deliver frames asynchronously through callbacks, which I find hard to live with, so the passive push-style delivery is wrapped into an active pull: the latest frame is cached, and the caller fetches it whenever it needs one.
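    In other words, instead of handling every camera callback, the caller asks for the newest frame on demand. A minimal sketch of the pull side, assuming the TKVideoCapture class declared below (capture and consume_bgra() are placeholders, not part of the class):

    // Pull model: the caller decides when to read, so its loop rate is decoupled
    // from the camera's delivery rate. 'capture' and consume_bgra() are placeholders.
    uint32_t length = 0;
    uint8_t* bgra = [capture get_image_rgb32:&length];  // copies the newest cached frame under a lock
    if (bgra != NULL && length > 0)
        consume_bgra(bgra, length);                     // e.g. feed an encoder or upload to a texture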

    Header file:

     

    #import <Foundation/Foundation.h>
    #import <AVFoundation/AVFoundation.h>

    // All supported frame sizes use a 4:3 aspect ratio.
    // The width is packed into the high 16 bits, the height into the low 16 bits.
    typedef enum TKVideoFrameSize
    {
        tkVideoFrame480x360 = 480 << 16 | 360,
        tkVideoFrame720x540 = 720 << 16 | 540, // This resolution is noticeably faster.
    } TKVideoFrameSize;


    @interface TKVideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

    - (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type ;
    - (bool) destory;

    - (bool) start ;
    - (bool) stop ;

    // Returns image data in BGRA byte order.
    - (uint8_t*) get_image_rgb32:(uint32_t*)length ;

    @end

     

    Implementation file (the TKLock used here comes from the previous article; a minimal stand-in is sketched below for reference):
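    A possible minimal stand-in for TKLock, built on a pthread mutex and matching the open/close/lock/unlock interface the implementation relies on (this is only an illustration, not the original TKLock):

    #import <Foundation/Foundation.h>
    #import <pthread.h>

    // Minimal stand-in for TKLock: a pthread mutex exposed through the
    // open/close/lock/unlock interface used by TKVideoCapture.
    @interface TKLock : NSObject
    {
        pthread_mutex_t _mutex;
    }
    - (void) open;
    - (void) close;
    - (void) lock;
    - (void) unlock;
    @end

    @implementation TKLock
    - (void) open   { pthread_mutex_init(&_mutex, NULL); }
    - (void) close  { pthread_mutex_destroy(&_mutex);    }
    - (void) lock   { pthread_mutex_lock(&_mutex);       }
    - (void) unlock { pthread_mutex_unlock(&_mutex);     }
    @end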

     

    #import "TKVideoCapture.h"
    #import <UIKit/UIKit.h>
    #import <CoreGraphics/CoreGraphics.h>
    #import <CoreVideo/CoreVideo.h>
    #import <CoreMedia/CoreMedia.h>
    #import "TKLock.h"

    @interface TKVideoCapture ()
    {
        TKVideoFrameSize            _frametype      ;
        UIView*                     _preview        ;
        AVCaptureSession*           _captureSession ;
        AVCaptureVideoPreviewLayer* _capturePreview ;
        AVCaptureVideoDataOutput *  _captureOutput  ;
        AVCaptureDevice*            _captureDevice  ;
        AVCaptureDeviceInput*       _captureInput   ;

        uint8_t*                    _buffer_temp    ; // Every captured frame is written into this buffer.
        uint8_t*                    _buffer_obox    ; // The caller's copy; filled from _buffer_temp on demand.
        CGRect                      _subImageRect   ; // Crop rect of the sub-image within the full frame.

        TKLock*                     _buffer_lock    ;
    }

    @end


    @implementation TKVideoCapture

    - (void) do_create
    {
        self->_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] ;
        self->_captureInput  = [AVCaptureDeviceInput deviceInputWithDevice:self->_captureDevice error:nil];
        self->_captureOutput = [[AVCaptureVideoDataOutput alloc] init];

        if(self->_captureOutput)
            [self->_captureOutput setAlwaysDiscardsLateVideoFrames:true];

        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [self->_captureOutput setSampleBufferDelegate:self queue:queue];

        dispatch_release(queue);

        NSString* key   = (NSString*)kCVPixelBufferPixelFormatTypeKey;
        NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];

        NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];

        [self->_captureOutput setVideoSettings:videoSettings];
        self->_captureSession = [[AVCaptureSession alloc] init];

        // The enum packs width into the high 16 bits and height into the low 16 bits.
        uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
        uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;

        _buffer_temp = (uint8_t*)malloc(width * height * 4);
        _buffer_obox = (uint8_t*)malloc(width * height * 4);

        // 0.75 (3:4) is the intended aspect ratio of the cropped sub-image.
        switch (_frametype) {
            case tkVideoFrame480x360:
            {
                _captureSession.sessionPreset = AVCaptureSessionPreset640x480 ;
                _subImageRect = CGRectMake((640-360)/2, 0, 360, 480);
                break;
            }
            case tkVideoFrame720x540:
            {
                _captureSession.sessionPreset = AVCaptureSessionPresetiFrame1280x720 ;
                _subImageRect = CGRectMake((1280-540)/2, 0, 540, 720);
                break;
            }
            default:
                break;
        }

        if(self->_captureInput != nil)
            [self->_captureSession addInput:self->_captureInput];

        [self->_captureSession addOutput:self->_captureOutput];

        self->_capturePreview = [AVCaptureVideoPreviewLayer layerWithSession: self->_captureSession];
        self->_capturePreview.frame = self->_preview.bounds;
        self->_capturePreview.videoGravity = AVLayerVideoGravityResizeAspectFill;
        self->_capturePreview.connection.videoOrientation = [self getOrientation] ;

        [self->_preview.layer addSublayer: self->_capturePreview];

        _buffer_lock = [[TKLock alloc] init];
        [_buffer_lock open];
    }

    - (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type
    {
        self->_frametype = type ;
        self->_preview   = preview ;

        [self performSelectorOnMainThread:@selector(do_create) withObject:self waitUntilDone:true];

        return true ;
    }

    - (AVCaptureVideoOrientation) getOrientation
    {
        UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation ;
        switch(orientation)
        {
            case UIInterfaceOrientationPortrait:           return AVCaptureVideoOrientationPortrait;
            case UIInterfaceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
            case UIInterfaceOrientationLandscapeLeft:      return AVCaptureVideoOrientationLandscapeLeft;
            case UIInterfaceOrientationLandscapeRight:     return AVCaptureVideoOrientationLandscapeRight;
        }
        return AVCaptureVideoOrientationLandscapeLeft ;
    }

    - (void) do_destory
    {
        [_buffer_lock close];
        [_buffer_lock release];
        _buffer_lock = nil ;

        free(_buffer_temp);
        free(_buffer_obox);
        _buffer_temp = NULL ;
        _buffer_obox = NULL ;

        [self->_captureSession stopRunning];
        [self->_capturePreview removeFromSuperlayer];
        [self->_captureOutput  release];
        [self->_captureSession release];
        self->_captureSession = nil ;
        self->_capturePreview = nil ;
        self->_captureOutput  = nil ;
        self->_captureDevice  = nil ;
        self->_captureInput   = nil ;
        self->_preview        = nil ;
    }

    - (bool) destory
    {
        [self performSelectorOnMainThread:@selector(do_destory) withObject:self waitUntilDone:true];
        return true ;
    }

    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer,0);
        uint8_t* baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

        size_t width  = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bytesPerRow * height, NULL);

        CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace,
                                            kCGBitmapByteOrder32Little|kCGImageAlphaPremultipliedFirst,
                                            provider, NULL, false, kCGRenderingIntentDefault);

        // Crop the centered sub-image, then rotate it -90 degrees so the output matches TKVideoFrameSize.
        CGImageRef subImageRef = CGImageCreateWithImageInRect(imageRef, _subImageRect);

        size_t subWidth  = _subImageRect.size.height ;
        size_t subHeight = _subImageRect.size.width  ;

        CGContextRef context = CGBitmapContextCreate(NULL, subWidth, subHeight,
                                                     CGImageGetBitsPerComponent(subImageRef), 0,
                                                     CGImageGetColorSpace(subImageRef),
                                                     CGImageGetBitmapInfo(subImageRef));

        CGContextTranslateCTM(context, 0, subHeight);
        CGContextRotateCTM(context, -M_PI/2);

        CGContextDrawImage(context, CGRectMake(0, 0, subHeight, subWidth), subImageRef);

        uint8_t* data = (uint8_t*)CGBitmapContextGetData(context);

        [_buffer_lock lock];
        memcpy(_buffer_temp, data, subWidth * subHeight * 4);
        [_buffer_lock unlock];

        CGContextRelease(context);
        CGImageRelease(imageRef);
        CGImageRelease(subImageRef);
        CGDataProviderRelease(provider);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    }

    - (void) do_start
    {
        [self->_captureSession startRunning];
    }

    - (void) do_stop
    {
        [self->_captureSession stopRunning];
    }

    - (bool) start
    {
        [self performSelectorOnMainThread:@selector(do_start) withObject:self waitUntilDone:true];
        return true ;
    }

    - (bool) stop
    {
        [self performSelectorOnMainThread:@selector(do_stop) withObject:self waitUntilDone:true];
        return true ;
    }

    - (uint8_t*) get_image_rgb32:(uint32_t*)length
    {
        uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
        uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;

        // Copy the latest frame from the capture callback's buffer into the caller-facing buffer.
        [_buffer_lock lock];
        memcpy(_buffer_obox, _buffer_temp, width * height * 4);
        [_buffer_lock unlock];

        if(length)
            *length = width * height * 4 ;

        return _buffer_obox ;
    }


    @end
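
    For reference, a typical call sequence might look like the sketch below. The TKCameraController class, its _capture ivar, and the preview view are placeholders of my own; error handling is omitted, and the code follows the same manual reference counting style as the class itself:

    #import <UIKit/UIKit.h>
    #import "TKVideoCapture.h"

    // Hypothetical owner of the capture object, shown only to illustrate usage.
    @interface TKCameraController : NSObject
    {
        TKVideoCapture* _capture;
    }
    - (void) startWithPreview:(UIView*)previewView;
    - (void) grabOneFrame;
    - (void) shutdown;
    @end

    @implementation TKCameraController

    - (void) startWithPreview:(UIView*)previewView
    {
        _capture = [[TKVideoCapture alloc] init];
        [_capture create:previewView frame:tkVideoFrame480x360]; // builds the session on the main thread
        [_capture start];                                        // begins delivering frames
    }

    - (void) grabOneFrame
    {
        uint32_t length = 0;
        uint8_t* bgra = [_capture get_image_rgb32:&length];     // pull the newest BGRA frame on demand
        if (bgra != NULL && length > 0)
        {
            // Hand the pixels to whatever consumes them (encoder, texture upload, ...).
        }
    }

    - (void) shutdown
    {
        [_capture stop];
        [_capture destory];   // note: the class spells it "destory"
        [_capture release];   // the class uses manual reference counting
        _capture = nil;
    }

    @end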

     

 

Reposted from: https://www.cnblogs.com/allanliu/p/4203518.html
