Contributing author -【XJDomain】
XJ's blog: https://my.oschina.net/shengbingli/blog
GitHub: https://github.com/lishengbing/XJQRCodeToolDemo
Lecture 3: Capturing
> Switching the camera
@IBAction func switchScene() {
    // 1: Get the position of the current camera from the existing input
    guard var position = videoInput?.device.position else { return }
    // 2: Toggle to the opposite camera
    position = position == .front ? .back : .front
    // 3: Find the capture device for the new position
    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice]
    guard let device = devices?.filter({ $0.position == position }).first else { return }
    // 4: Create a new input from that device
    guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
    // 5: Swap the inputs on the session inside a configuration block
    session.beginConfiguration()
    if let oldInput = self.videoInput {
        session.removeInput(oldInput)
    }
    if session.canAddInput(videoInput) {
        session.addInput(videoInput)
    }
    session.commitConfiguration()
    self.videoInput = videoInput
}
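One gap the snippet above leaves open: if canAddInput rejects the new input, the session is left with no video input at all. Below is a minimal sketch of a more defensive swap, using the same Swift 3 era AVFoundation calls as the rest of this lecture; the helper name swapVideoInput is ours, not from the original project.

import AVFoundation

// Sketch only: swap the session's video input, falling back to the old input
// when the new one cannot be added, so the session always keeps a camera input.
func swapVideoInput(on session: AVCaptureSession,
                    from oldInput: AVCaptureDeviceInput,
                    to newInput: AVCaptureDeviceInput) -> AVCaptureDeviceInput {
    session.beginConfiguration()
    defer { session.commitConfiguration() }
    session.removeInput(oldInput)
    if session.canAddInput(newInput) {
        session.addInput(newInput)
        return newInput
    }
    // The new input was rejected; restore the previous camera.
    session.addInput(oldInput)
    return oldInput
}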
Example:
//
//  ViewController.swift
//  Video Capture
//
//  Created by 李胜兵 on 2015/11/23.
//  Copyright © 2015 付公司. All rights reserved.
//
import UIKit
import AVFoundation
class ViewController: UIViewController {
    fileprivate lazy var videoQueue = DispatchQueue.global()
    fileprivate lazy var audioQueue = DispatchQueue.global()
    fileprivate lazy var session : AVCaptureSession = AVCaptureSession()
    fileprivate lazy var previewLayer : AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    fileprivate var videoInput : AVCaptureDeviceInput?
    fileprivate var videoOutput : AVCaptureVideoDataOutput?
}
// MARK: - Start/stop video capture & switch camera
extension ViewController {
    @IBAction func switchScene() {
        // 1: Get the position of the current camera from the existing input
        guard var position = videoInput?.device.position else { return }
        // 2: Toggle to the opposite camera
        position = position == .front ? .back : .front
        // 3: Find the capture device for the new position
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice]
        guard let device = devices?.filter({ $0.position == position }).first else { return }
        // 4: Create a new input from that device
        guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
        // 5: Swap the inputs on the session inside a configuration block
        session.beginConfiguration()
        if let oldInput = self.videoInput {
            session.removeInput(oldInput)
        }
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
        session.commitConfiguration()
        self.videoInput = videoInput
    }
    @IBAction func starCapture() {
        stopCapture()
        // 1: Set up the video input && output
        setupVideo()
        // 2: Set up the audio input && output
        setupAudio()
        // 3: Give the user a preview layer (optional)
        previewLayer.frame = view.bounds
        view.layer.insertSublayer(previewLayer, at: 0)
        // 4: Start capturing
        session.startRunning()
    }
    @IBAction func stopCapture() {
        print("Stopping capture...")
        session.stopRunning()
        previewLayer.removeFromSuperlayer()
    }
}
extension ViewController {
    // Requires the Info.plist key NSCameraUsageDescription (camera usage description)
    fileprivate func setupVideo() {
        // 1: Give the capture session an input source (the camera)
        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else {
            print("The camera is not available")
            return
        }
        /* Way 1 to get the camera device
        let device = devices.filter { (device : AVCaptureDevice) -> Bool in
            return device.position == .front
        }.first
        */
        // 2: Way 2 to get the camera device
        guard let device = devices.filter({ $0.position == .back }).first else { return }
        guard let videoInput = try? AVCaptureDeviceInput(device: device) else { return }
        self.videoInput = videoInput
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
        // 3: Give the capture session an output source
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
        }
        // 4: Get the connection for the video output (optional)
        // connection = videoOutput.connection(withMediaType: AVMediaTypeVideo)
        self.videoOutput = videoOutput
    }
    // Requires the Info.plist key NSMicrophoneUsageDescription (microphone usage description)
    fileprivate func setupAudio() {
        // 1: Set up the audio input (the microphone)
        // 1.1: Get the microphone device
        guard let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else { return }
        // 1.2: Create an AVCaptureDeviceInput from the device
        guard let audioInput = try? AVCaptureDeviceInput(device: device) else { return }
        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
        }
        // 2: Give the session an audio output source
        let audioOutput = AVCaptureAudioDataOutput()
        audioOutput.setSampleBufferDelegate(self, queue: audioQueue)
        if session.canAddOutput(audioOutput) {
            session.addOutput(audioOutput)
        }
    }
}
extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if connection == self.videoOutput?.connection(withMediaType: AVMediaTypeVideo) {
            print("Captured a video frame")
        } else {
            print("Captured audio data")
        }
    }
}
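In the delegate method, the video branch is where real processing would start. Here is a short sketch of pulling the pixel data out of a video sample buffer so it can be handed to an encoder or to Core Image; the function name image(from:) is ours, not part of the original example.

import AVFoundation
import CoreImage

// Sketch: extract the pixel buffer from a video CMSampleBuffer and wrap it
// in a CIImage. Returns nil for audio buffers, which carry no image data.
func image(from sampleBuffer: CMSampleBuffer) -> CIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return nil
    }
    return CIImage(cvPixelBuffer: pixelBuffer)
}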
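The NSCameraUsageDescription and NSMicrophoneUsageDescription keys mentioned in the comments only supply the text of the system prompt; the app still has to be granted access at runtime, and the session simply delivers nothing if the user declines. Below is a minimal sketch of checking camera authorization before calling starCapture(), again against the Swift 3 era API; the helper name requestCameraAccess is hypothetical.

import AVFoundation

// Sketch: check (and if necessary request) camera permission before starting
// the capture session. The same pattern applies to AVMediaTypeAudio.
func requestCameraAccess(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
    case .authorized:
        completion(true)
    case .notDetermined:
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // .denied or .restricted: capture will not work until the user
        // changes the setting in the Settings app.
        completion(false)
    }
}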