#import "BZViewController.h"
#import <AVFoundation/AVFoundation.h>
// Class extension: private AVFoundation capture state for QR-code scanning.
@interface BZViewController ()<AVCaptureMetadataOutputObjectsDelegate>
@property (strong,nonatomic)AVCaptureDevice *device; // The physical capture device (camera).
@property (strong,nonatomic)AVCaptureDeviceInput *input;// Wraps the capture hardware (e.g. camera, microphone) as a session input.
@property (strong,nonatomic)AVCaptureMetadataOutput *output; // Emits decoded metadata (QR payloads) to the delegate.
@property (strong,nonatomic)AVCaptureSession *session;// Coordinates the data flow between the input and output.
@property (strong,nonatomic)AVCaptureVideoPreviewLayer *preview;// Layer showing the live camera preview.
@end
@implementation BZViewController
// Builds the minimal UI: a green container view hosting the scan button.
- (void)viewDidLoad {
    [super viewDidLoad];

    UIView *containerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 200, 200)];
    containerView.backgroundColor = [UIColor greenColor];
    [self.view addSubview:containerView];

    // Tapping the button kicks off the capture session (see -btnClick).
    UIButton *scanButton = [[UIButton alloc] initWithFrame:CGRectMake(50, 50, 100, 40)];
    scanButton.backgroundColor = [UIColor redColor];
    [scanButton setTitle:@"btn" forState:UIControlStateNormal];
    [scanButton addTarget:self
                   action:@selector(btnClick)
         forControlEvents:UIControlEventTouchUpInside];
    [containerView addSubview:scanButton];
}
// Configures the AVFoundation capture pipeline and starts scanning for QR codes.
// Results are delivered via -captureOutput:didOutputMetadataObjects:fromConnection:.
- (void)btnClick{
    // Device: default camera. Nil on the simulator or when no camera exists —
    // guard so we don't build a dead pipeline.
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!self.device) {
        NSLog(@"BZViewController: no video capture device available");
        return;
    }
    // Input: capture and report the error instead of discarding it (error:nil).
    NSError *inputError = nil;
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
    if (!self.input) {
        NSLog(@"BZViewController: failed to create device input: %@", inputError);
        return;
    }
    // Output: deliver decoded metadata on the main queue so the delegate may touch UI.
    self.output = [[AVCaptureMetadataOutput alloc] init];
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    // Session: coordinates data flow between input and output.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.output]) {
        [self.session addOutput:self.output];
    }
    // Barcode types — must be set AFTER the output joins the session, otherwise
    // the set of available metadata object types is empty.
    self.output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];
    // Preview layer showing the live camera feed, filling the whole view.
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.preview.frame = self.view.bounds;
    [self.view.layer addSublayer:self.preview];
    // -startRunning blocks until capture is up; keep it off the main thread
    // per Apple's AVCaptureSession guidance.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self.session startRunning];
    });
}
#pragma mark AVCaptureMetadataOutputObjectsDelegate
// Called on the main queue when the metadata output decodes one or more codes.
// Reads the first result's payload, stops scanning, and logs it.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    NSString *stringValue = nil;
    // firstObject is nil-safe; verify the class before the downcast rather
    // than assuming every metadata object is machine-readable.
    AVMetadataObject *firstObject = metadataObjects.firstObject;
    if ([firstObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        stringValue = ((AVMetadataMachineReadableCodeObject *)firstObject).stringValue;
    }
    // Stop after the first delivery; use the property accessor for consistency
    // with the rest of the class (was direct ivar access `_session`).
    [self.session stopRunning];
    NSLog(@"%@", stringValue);
}
#import <AVFoundation/AVFoundation.h>
// NOTE(review): this entire class extension + implementation appears to be a
// pasted duplicate of the content earlier in this file (and the preceding
// @implementation is never closed with @end) — this file cannot compile as-is;
// one copy should be removed and an @end restored.
// Class extension: private AVFoundation capture state for QR-code scanning.
@interface BZViewController ()<AVCaptureMetadataOutputObjectsDelegate>
@property (strong,nonatomic)AVCaptureDevice *device; // The physical capture device (camera).
@property (strong,nonatomic)AVCaptureDeviceInput *input;// Wraps the capture hardware (e.g. camera, microphone) as a session input.
@property (strong,nonatomic)AVCaptureMetadataOutput *output; // Emits decoded metadata (QR payloads) to the delegate.
@property (strong,nonatomic)AVCaptureSession *session;// Coordinates the data flow between the input and output.
@property (strong,nonatomic)AVCaptureVideoPreviewLayer *preview;// Layer showing the live camera preview.
@end
@implementation BZViewController
// Builds the minimal UI: a green container view hosting the scan button.
- (void)viewDidLoad {
    [super viewDidLoad];

    UIView *containerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 200, 200)];
    containerView.backgroundColor = [UIColor greenColor];
    [self.view addSubview:containerView];

    // Tapping the button kicks off the capture session (see -btnClick).
    UIButton *scanButton = [[UIButton alloc] initWithFrame:CGRectMake(50, 50, 100, 40)];
    scanButton.backgroundColor = [UIColor redColor];
    [scanButton setTitle:@"btn" forState:UIControlStateNormal];
    [scanButton addTarget:self
                   action:@selector(btnClick)
         forControlEvents:UIControlEventTouchUpInside];
    [containerView addSubview:scanButton];
}
// Configures the AVFoundation capture pipeline and starts scanning for QR codes.
// Results are delivered via -captureOutput:didOutputMetadataObjects:fromConnection:.
- (void)btnClick{
    // Device: default camera. Nil on the simulator or when no camera exists —
    // guard so we don't build a dead pipeline.
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!self.device) {
        NSLog(@"BZViewController: no video capture device available");
        return;
    }
    // Input: capture and report the error instead of discarding it (error:nil).
    NSError *inputError = nil;
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:&inputError];
    if (!self.input) {
        NSLog(@"BZViewController: failed to create device input: %@", inputError);
        return;
    }
    // Output: deliver decoded metadata on the main queue so the delegate may touch UI.
    self.output = [[AVCaptureMetadataOutput alloc] init];
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    // Session: coordinates data flow between input and output.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.output]) {
        [self.session addOutput:self.output];
    }
    // Barcode types — must be set AFTER the output joins the session, otherwise
    // the set of available metadata object types is empty.
    self.output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];
    // Preview layer showing the live camera feed, filling the whole view.
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.preview.frame = self.view.bounds;
    [self.view.layer addSublayer:self.preview];
    // -startRunning blocks until capture is up; keep it off the main thread
    // per Apple's AVCaptureSession guidance.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self.session startRunning];
    });
}
#pragma mark AVCaptureMetadataOutputObjectsDelegate
// Called on the main queue when the metadata output decodes one or more codes.
// Reads the first result's payload, stops scanning, and logs it.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    NSString *stringValue = nil;
    // firstObject is nil-safe; verify the class before the downcast rather
    // than assuming every metadata object is machine-readable.
    AVMetadataObject *firstObject = metadataObjects.firstObject;
    if ([firstObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        stringValue = ((AVMetadataMachineReadableCodeObject *)firstObject).stringValue;
    }
    // Stop after the first delivery; use the property accessor for consistency
    // with the rest of the class (was direct ivar access `_session`).
    [self.session stopRunning];
    NSLog(@"%@", stringValue);
}