Here's a quick overview of camera capture on iOS.
First, initialize an AVCaptureSession. (Speaking of sessions, did AVAudioSession come to anyone's mind?)
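A minimal sketch of that first step (the preset below is an arbitrary choice for illustration; pick one matching your target resolution):

AVCaptureSession *session = [[[AVCaptureSession alloc] init] autorelease];
session.sessionPreset = AVCaptureSessionPresetMedium;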
Next, set the capture formats for video and audio. The two are configured separately, which also means you can capture video only.
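On the video side, for example, the format is a pixel-format dictionary set on the data output (the 420f format below is just one common choice; the full listing further down uses BGRA instead):

AVCaptureVideoDataOutput *videoOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
videoOutput.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };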
Then implement AVCaptureVideoDataOutputSampleBufferDelegate (and, for audio, its counterpart AVCaptureAudioDataOutputSampleBufferDelegate):
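Both data outputs deliver into the same callback, sketched here:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// Compare connection against the saved video/audio connections to tell the streams apart
}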
Real-time encoding of those buffers to H.264 and AAC is two further topics in their own right; we'll cover them later.
With configuration done, start the session:
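[self.captureSession startRunning];

Note that startRunning is a blocking call, so production code usually invokes it off the main thread.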
1.1 Bonus task: display the captured video frames on screen
This is easy: on the capture side, simply use Apple's own AVCaptureVideoPreviewLayer for display. Add that layer to your view hierarchy and the preview shows up.
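In its minimal form (the same pattern appears in the full listing below):

AVCaptureVideoPreviewLayer *prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
prevLayer.frame = self.view.bounds;
prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:prevLayer];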
Full implementation:
#import "MyAVController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
@interface MyAVController () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;
@end
#import "MyAVController.h"
@implementation MyAVController
{
AVCaptureConnection *_videoConnection;
AVCaptureConnection *_audioConnection;
}
#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}
- (void)viewDidLoad {
[super viewDidLoad];
[self initCapture];
}
- (void)initCapture {
// Configure the capture input source (the camera)
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Initialize a capture input object with the device
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
//captureOutput.minFrameDuration = CMTimeMake(1, 10); // deprecated since iOS 5; set the frame rate on the AVCaptureConnection/AVCaptureDevice instead
// Configure the capture output, i.e., the interface through which we receive the video frames
dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Configure the output pixel format
NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:captureOutput];
// Save the connection so the SampleBufferDelegate can tell whether incoming data is video or audio
_videoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
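// NOTE: the delegate below also checks _audioConnection, yet only video has been
// wired up so far. A sketch of the matching audio setup (the queue name is
// arbitrary, and reusing this controller as the audio delegate is one option):
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
AVCaptureAudioDataOutput *audioOutput = [[[AVCaptureAudioDataOutput alloc] init] autorelease];
dispatch_queue_t audioQueue = dispatch_queue_create("audioQueue", NULL);
[audioOutput setSampleBufferDelegate:self queue:audioQueue];
dispatch_release(audioQueue);
[self.captureSession addInput:audioInput];
[self.captureSession addOutput:audioOutput];
_audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];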
// Set up the views/layers used for display
self.customLayer = [CALayer layer];
self.customLayer.frame = self.view.bounds;
self.customLayer.transform = CATransform3DRotate(
CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
[self.view.layer addSublayer:self.customLayer];
self.imageView = [[[UIImageView alloc] init] autorelease];
self.imageView.frame = CGRectMake(0, 0, 100, 100);
[self.view addSubview:self.imageView];
self.prevLayer = [AVCaptureVideoPreviewLayer
layerWithSession: self.captureSession];
self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer: self.prevLayer];
// Everything is wired up; start capturing (startRunning blocks, so production code usually calls it off the main thread)
[self.captureSession startRunning];
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// sampleBuffer is the captured data; whether it is video or audio is decided by the connection it arrived on
if (connection == _audioConnection) { // Audio
NSLog(@"Got an audio sampleBuffer here; process it further (e.g., encode to AAC)");
return; // audio buffers carry no pixel data, so skip the image handling below
}
if (connection != _videoConnection) {
return;
}
// Video
/*
// Get the current video frame dimensions
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
NSLog(@"video width: %zu height: %zu", width, height);
*/
NSLog(@"Got a video sampleBuffer here; process it further (e.g., encode to H.264)");
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
// Wrap the BGRA pixel buffer in a bitmap context to turn it into a CGImage for display
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress,
width, height, 8, bytesPerRow, colorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
[self.customLayer performSelectorOnMainThread:@selector(setContents:)
withObject: (__bridge id) newImage waitUntilDone:YES];
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0
orientation:UIImageOrientationRight];
CGImageRelease(newImage);
[self.imageView performSelectorOnMainThread:@selector(setImage:)
withObject:image waitUntilDone:YES];
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
#pragma mark -
#pragma mark Memory management
- (void)viewDidUnload {
[super viewDidUnload];
[self.captureSession stopRunning];
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
}
- (void)dealloc {
[_captureSession release];
[_imageView release];
[_customLayer release];
[_prevLayer release];
[super dealloc];
}
@end
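One deployment note: starting with iOS 10, the app's Info.plist must contain an NSCameraUsageDescription entry (and NSMicrophoneUsageDescription once audio capture is added), otherwise the system terminates the app as soon as the session accesses the device.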