didFailWithError: Error Domain=kCLErrorDomain Code=0 "The operation couldn't be completed. (kCLErrorDomain error 0.)"

This error also occurs if you have Scheme > Edit Scheme > Options > Allow Location Simulation checked but don't have a default location set.

Please check this; it may resolve your issue.
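On the code side, kCLErrorDomain Code=0 corresponds to CLError.locationUnknown, which is usually transient (Core Location keeps trying), so the delegate can treat it as non-fatal. Below is a minimal sketch of that handling; the class name and log messages are illustrative and not part of the original code:

```swift
import CoreLocation

final class LocationErrorLogger: NSObject, CLLocationManagerDelegate {
    func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) {
        guard let clError = error as? CLError else { return }
        switch clError.code {
        case .locationUnknown:
            // Code 0: no fix is available yet (common in the simulator when no
            // location is being simulated). Core Location keeps trying, so just log it.
            print("Location temporarily unavailable; waiting for the next update")
        case .denied:
            // The user disabled location access; stop to avoid repeated failures.
            manager.stopUpdatingLocation()
        default:
            print("Core Location error: \(clError.code.rawValue)")
        }
    }
}
```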

// // CustomCameraView.swift // CheckInCamera // // Created by csld on 2025/7/17. // import UIKit import AVFoundation import CoreVideo import VideoToolbox // UIImage扩展,用于添加水印 extension UIImage { func addVideoWatermark(_ watermarkView: UIView, targetFrame: CGRect) -> UIImage? { // 将UIView转换为UIImage let watermarkImage = watermarkView.convertToImage() // 开始图片上下文 - 使用设备的实际缩放因子 UIGraphicsBeginImageContextWithOptions(size, true, UIScreen.main.scale) defer { UIGraphicsEndImageContext() } // 绘制原始图片 draw(in: CGRect(origin: .zero, size: size)) // 计算水印在目标帧中的大小和位置 let watermarkSize = watermarkImage.size let targetSize = targetFrame.size // 计算缩放比例 let baseScaleX = targetSize.width / watermarkSize.width let baseScaleY = targetSize.height / watermarkSize.height let scaleX = baseScaleX * 2.2 // 横向放大倍数 let scaleY = baseScaleY * 2.2 // 纵向放大倍数 // 计算缩放后的水印大小 let scaledSize = CGSize( width: watermarkSize.width * scaleX, height: watermarkSize.height * scaleY ) // 计算水印在目标帧中的居中位置(原始逻辑) var x = targetFrame.origin.x + (targetFrame.size.width - scaledSize.width) / 2 var y = targetFrame.origin.y + (targetFrame.size.height - scaledSize.height) / 2 // 关键修复:修正Y轴方向(如果视频帧Y轴与预览层相反) // 原理:用视频高度减去计算出的Y值,实现“底部”位置反转 y = size.height - y - scaledSize.height // 核心修正代码 // 限制水印在视频帧范围内 x = max(0, min(x, size.width - scaledSize.width)) y = max(0, min(y, size.height - scaledSize.height)) // 绘制水印 watermarkImage.draw(in: CGRect(x: x, y: y, width: scaledSize.width, height: scaledSize.height)) // 获取合成后的图片 return UIGraphicsGetImageFromCurrentImageContext() } } extension UIView { func convertToImage() -> UIImage { // 定义变量存储结果 var resultImage: UIImage = UIImage() // 强制在主线程执行 UI 相关操作 DispatchQueue.main.sync { // 确保在主线程创建图像上下文 - 使用设备的实际缩放因子 UIGraphicsBeginImageContextWithOptions(self.bounds.size, self.isOpaque, UIScreen.main.scale) defer { UIGraphicsEndImageContext() } // 确保在主线程渲染图层 if let context = UIGraphicsGetCurrentContext() { self.layer.render(in: context) if let image = UIGraphicsGetImageFromCurrentImageContext() { resultImage = image } } } return resultImage } } // 相机权限状态 enum CameraAuthorizationStatus { case authorized case denied case notDetermined case restricted } // 相机错误类型 enum CameraError: Error { case captureSessionAlreadyRunning case captureSessionIsMissing case inputsAreInvalid case invalidOperation case noCameraAvailable case torchUnavailable case unknown } // 相机代理协议 protocol CameraViewDelegate: AnyObject { func cameraView(_ cameraView: CustomCameraView, didCapturePhoto photo: UIImage) func cameraView(_ cameraView: CustomCameraView, didFailWithError error: Error) func cameraViewDidChangeAuthorizationStatus(_ cameraView: CustomCameraView, status: CameraAuthorizationStatus) // 视频录制状态回调 func cameraViewDidStartRecording(_ cameraView: CustomCameraView) func cameraViewDidStopRecording(_ cameraView: CustomCameraView, videoURL: URL?) } class CustomCameraView: UIView { // MARK: - 属性 weak var delegate: CameraViewDelegate? // 相机设备 private var captureSession: AVCaptureSession? private var videoDeviceInput: AVCaptureDeviceInput? private var photoOutput: AVCapturePhotoOutput? private var videoDataOutput: AVCaptureVideoDataOutput? private var previewLayer: AVCaptureVideoPreviewLayer? // 用于视频数据处理的队列 private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutput") // 视频录制相关属性 private var assetWriter: AVAssetWriter? private var assetWriterInput: AVAssetWriterInput? private var assetWriterInputPixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor? private var isRecording = false private var recordingStartTime: CMTime? 
private var videoOutputURL: URL? // 存储最后一个样本缓冲区,用于获取视频尺寸 private var lastSampleBuffer: CMSampleBuffer? // 水印位置 var watermarkPosition: CGPoint? // 添加专用串行队列用于AVAssetWriter操作 private let recordingQueue = DispatchQueue(label: "com.CheckInCamera.recordingQueue") // 当前相机设置 private var currentCameraPosition: AVCaptureDevice.Position = .back private var currentFlashMode: AVCaptureDevice.FlashMode = .auto /// 当前缩放比例 private var currentZoomFactor: CGFloat = 1.0 private let minZoomFactor: CGFloat = 1.0 private let maxZoomFactor: CGFloat = 5.0 private var isTorchOn = false private var processedImageSize: CGSize? // 1. 预先渲染水印(只需一次) private var watermarkImage: CGImage? // 视图生命周期 override init(frame: CGRect) { super.init(frame: frame) setupView() } required init?(coder: NSCoder) { super.init(coder: coder) setupView() } deinit { stopSession() } // MARK: - 初始化方法 private func setupView() { // 设置预览层 previewLayer = AVCaptureVideoPreviewLayer() previewLayer?.videoGravity = .resizeAspectFill previewLayer?.masksToBounds = true layer.addSublayer(previewLayer!) // 检查相机权限 checkCameraAuthorization() } override func layoutSubviews() { super.layoutSubviews() previewLayer?.frame = bounds } // MARK: - 相机权限管理 func checkCameraAuthorization() { switch AVCaptureDevice.authorizationStatus(for: .video) { case .authorized: delegate?.cameraViewDidChangeAuthorizationStatus(self, status: .authorized) setupCaptureSession() case .notDetermined: delegate?.cameraViewDidChangeAuthorizationStatus(self, status: .notDetermined) requestCameraAccess() case .denied, .restricted: delegate?.cameraViewDidChangeAuthorizationStatus(self, status: .denied) @unknown default: delegate?.cameraViewDidChangeAuthorizationStatus(self, status: .restricted) } } private func requestCameraAccess() { AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in DispatchQueue.main.async { if granted { self?.delegate?.cameraViewDidChangeAuthorizationStatus(self!, status: .authorized) self?.setupCaptureSession() } else { self?.delegate?.cameraViewDidChangeAuthorizationStatus(self!, status: .denied) } } } } // MARK: - 相机设置 private func setupCaptureSession() { captureSession = AVCaptureSession() captureSession?.sessionPreset = .hd1920x1080 // 设置输入设备 do { try setupCameraInputs() setupPhotoOutput() setupVideoDataOutput() // 添加视频数据输出 } catch { delegate?.cameraView(self, didFailWithError: error) } // 开始会话 startSession() } private func setupCameraInputs() throws { guard let captureSession = captureSession else { throw CameraError.captureSessionIsMissing } // 获取相机设备 let cameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentCameraPosition) guard let device = cameraDevice else { throw CameraError.noCameraAvailable } // 创建设备输入 let deviceInput = try AVCaptureDeviceInput(device: device) // 检查并添加输入 if captureSession.canAddInput(deviceInput) { captureSession.addInput(deviceInput) videoDeviceInput = deviceInput } else { throw CameraError.inputsAreInvalid } // 更新预览层 previewLayer?.session = captureSession } private func setupPhotoOutput() { guard let captureSession = captureSession else { return } photoOutput = AVCapturePhotoOutput() photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil) if captureSession.canAddOutput(photoOutput!) { captureSession.addOutput(photoOutput!) 
} } // 设置视频数据输出方法 private func setupVideoDataOutput() { guard let captureSession = captureSession else { return } videoDataOutput = AVCaptureVideoDataOutput() // 设置视频格式 videoDataOutput?.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] // 设置总是丢弃过时的帧 videoDataOutput?.alwaysDiscardsLateVideoFrames = true // 设置代理和队列 videoDataOutput?.setSampleBufferDelegate(self, queue: videoDataOutputQueue) if captureSession.canAddOutput(videoDataOutput!) { captureSession.addOutput(videoDataOutput!) } } // MARK: - 视频录制方法 /// 开始录制视频 func startRecording() { if isRecording { delegate?.cameraView(self, didFailWithError: CameraError.invalidOperation) return } do { // 创建视频输出URL let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! let dateFormatter = DateFormatter() dateFormatter.dateFormat = "yyyyMMddHHmmss" let fileName = "video_\(dateFormatter.string(from: Date())).mp4" videoOutputURL = documentsDirectory.appendingPathComponent(fileName) // 确保删除已存在的文件 if FileManager.default.fileExists(atPath: videoOutputURL!.path) { try FileManager.default.removeItem(at: videoOutputURL!) } // 创建AVAssetWriter assetWriter = try AVAssetWriter(outputURL: videoOutputURL!, fileType: .mp4) var (videoWidth, videoHeight) = getPortraitResolution() // 设置视频输入 // 确保使用正确的宽高比 if let processedSize = processedImageSize { // 检查宽高是否有效,避免除以零 guard processedSize.width > 0, processedSize.height > 0 else { let defaultAspectRatio: CGFloat = 9.0 / 16.0 videoHeight = Int(CGFloat(videoWidth) / defaultAspectRatio) videoHeight = max(videoHeight, 1) // 确保至少为1 print("处理后图像尺寸无效,使用默认宽高比: \(videoWidth)x\(videoHeight)") return } // 使用处理后图像的宽高比 let aspectRatio = processedSize.width / processedSize.height // 检查比例是否有效 guard aspectRatio > 0, !aspectRatio.isInfinite, !aspectRatio.isNaN else { videoHeight = 1920 print("宽高比无效,使用默认高度: \(videoWidth)x\(videoHeight)") return } // 关键修复:使用CGFloat完整计算,避免类型转换导致的精度丢失 let calculatedHeight = CGFloat(videoWidth) / aspectRatio // 确保高度有效且不为零 guard calculatedHeight > 0, !calculatedHeight.isInfinite, !calculatedHeight.isNaN else { videoHeight = 1920 print("计算高度无效,使用默认高度: \(videoWidth)x\(videoHeight)") return } videoHeight = Int(calculatedHeight) videoHeight = max(videoHeight, 1) // 确保高度至少为1 print("使用处理后图像宽高比: \(videoWidth)x\(videoHeight)") } else if let sampleBuffer = lastSampleBuffer, let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { // 使用相机捕获图像的宽高比 let bufferWidth = CGFloat(CVPixelBufferGetWidth(imageBuffer)) let bufferHeight = CGFloat(CVPixelBufferGetHeight(imageBuffer)) let aspectRatio = bufferWidth / bufferHeight // 保持宽度,根据宽高比调整高度 videoHeight = videoWidth / Int(aspectRatio) print("使用相机捕获宽高比: \(videoWidth)x\(videoHeight)") } let bitsPerPixel: Float = 0.5 // 调整这个值来控制质量 (0.1-0.3) let frameRate: Int = 30 let bitsPerSecond = Int(Float(videoWidth * videoHeight * frameRate) * bitsPerPixel) let videoSettings: [String: Any] = [ AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720, AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill, AVVideoCompressionPropertiesKey: [ AVVideoAverageBitRateKey: 20_000_000, // 直接设置为12 Mbps,确保高质量 AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, AVVideoMaxKeyFrameIntervalKey: frameRate, // 每两秒一个关健帧 AVVideoAllowFrameReorderingKey: true, AVVideoExpectedSourceFrameRateKey: frameRate, AVVideoQualityKey: 0.9, // 质量设置 (0.0-1.0) AVVideoMaxKeyFrameIntervalDurationKey: 1, // 关键帧最大间隔(秒) ] ] assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: 
videoSettings) assetWriterInput?.expectsMediaDataInRealTime = true // 创建像素缓冲区适配器 let sourcePixelBufferAttributes: [String: Any] = [ kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA), kCVPixelBufferWidthKey as String: videoWidth, kCVPixelBufferHeightKey as String: videoHeight, kCVPixelBufferIOSurfacePropertiesKey as String: [:] // 添加IOSurface支持 ] assetWriterInputPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor( assetWriterInput: assetWriterInput!, sourcePixelBufferAttributes: sourcePixelBufferAttributes ) // 添加输入 if assetWriter?.canAdd(assetWriterInput!) ?? false { assetWriter?.add(assetWriterInput!) } // 开始写入(此时不启动会话,等待第一个帧的时间戳) assetWriter?.startWriting() isRecording = true recordingStartTime = nil // 重置为nil,等待第一个帧 // 通知代理 DispatchQueue.main.async { self.delegate?.cameraViewDidStartRecording(self) } } catch { delegate?.cameraView(self, didFailWithError: error) isRecording = false assetWriter = nil assetWriterInput = nil assetWriterInputPixelBufferAdaptor = nil } } /// 停止录制视频 func stopRecording() { recordingQueue.async { [weak self] in guard let self = self, self.isRecording else { return } self.isRecording = false guard let assetWriter = self.assetWriter, let assetWriterInput = self.assetWriterInput else { self.handleRecordingError(nil) return } // 标记输入完成 assetWriterInput.markAsFinished() // 完成写入并保存视频 assetWriter.finishWriting { [weak self] in guard let self = self else { return } if assetWriter.status == .completed { // 保存视频到相册(可选) if let videoURL = self.videoOutputURL { UISaveVideoAtPathToSavedPhotosAlbum( videoURL.path, nil, nil, nil ) } // 通知代理视频路径 DispatchQueue.main.async { self.delegate?.cameraViewDidStopRecording(self, videoURL: self.videoOutputURL) } } else { self.handleRecordingError(assetWriter.error) } // 清理资源 self.assetWriter = nil self.assetWriterInput = nil self.assetWriterInputPixelBufferAdaptor = nil self.recordingStartTime = nil } } } /// 1. 确定竖屏分辨率 - 优先使用设备支持的最佳竖屏分辨率 func getPortraitResolution() -> (width: Int, height: Int) { let session = AVCaptureDevice.DiscoverySession( deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back ) guard let device = session.devices.first else { return (1080, 1920) } var bestResolution: CMVideoDimensions? var bestPixelCount = 0 // 直接遍历而不创建中间数组 for format in device.formats { let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription) // 跳过横屏分辨率 guard dimensions.height > dimensions.width else { continue } // 计算像素数 let pixelCount = Int(dimensions.width) * Int(dimensions.height) // 更新最佳分辨率 if pixelCount > bestPixelCount { bestPixelCount = pixelCount bestResolution = dimensions } } if let best = bestResolution { return (Int(best.width), Int(best.height)) } // 默认值 return (1080, 1920) } // 错误处理方法 private func handleRecordingError(_ error: Error?) { isRecording = false assetWriter = nil assetWriterInput = nil assetWriterInputPixelBufferAdaptor = nil recordingStartTime = nil DispatchQueue.main.async { self.delegate?.cameraView(self, didFailWithError: error ?? 
CameraError.unknown) } } // MARK: - 相机控制 func startSession() { guard let captureSession = captureSession, !captureSession.isRunning else { return } DispatchQueue.global(qos: .userInitiated).async { captureSession.startRunning() } } func stopSession() { guard let captureSession = captureSession, captureSession.isRunning else { return } DispatchQueue.global(qos: .userInitiated).async { captureSession.stopRunning() } } /// 切换前后摄像头 private let cameraQueue = DispatchQueue(label: "com.example.camera") func switchCamera() { guard let captureSession = captureSession else { return } cameraQueue.async { [weak self] in guard let self = self else { return } if captureSession.isRunning { self.stopSession() } self.currentCameraPosition = (self.currentCameraPosition == .back) ? .front : .back captureSession.inputs.forEach { input in captureSession.removeInput(input) } do { try self.setupCameraInputs() self.startSession() DispatchQueue.main.async { self.previewLayer?.session = captureSession self.previewLayer?.frame = self.bounds } } catch { DispatchQueue.main.async { self.delegate?.cameraView(self, didFailWithError: error) self.currentCameraPosition = (self.currentCameraPosition == .back) ? .front : .back self.cameraQueue.async { try? self.setupCameraInputs() self.startSession() DispatchQueue.main.async { self.previewLayer?.session = captureSession self.previewLayer?.frame = self.bounds } } } } } } /// 闪光灯设置 func setFlashMode(_ flashMode: AVCaptureDevice.FlashMode) { currentFlashMode = flashMode } /// 拍照片 func capturePhoto() { guard let photoOutput = photoOutput else { return } let settings = AVCapturePhotoSettings() settings.flashMode = currentFlashMode photoOutput.capturePhoto(with: settings, delegate: self) } // MARK: - 灯光控制 func toggleTorch() { guard let device = AVCaptureDevice.default(for: .video), device.hasTorch else { delegate?.cameraView(self, didFailWithError: CameraError.noCameraAvailable) return } do { try device.lockForConfiguration() defer { device.unlockForConfiguration() } if device.isTorchAvailable { if device.torchMode == .off { try device.setTorchModeOn(level: AVCaptureDevice.maxAvailableTorchLevel) isTorchOn = true } else { device.torchMode = .off isTorchOn = false } } } catch { delegate?.cameraView(self, didFailWithError: error) } } /// 水印视图 var watermarkView: UIView? /// 设置缩放比例 func setZoom(scale: CGFloat) { guard let device = AVCaptureDevice.default(for: .video) else { return } do { try device.lockForConfiguration() defer { device.unlockForConfiguration() } let effectiveScale = min(max(scale, minZoomFactor), maxZoomFactor) currentZoomFactor = effectiveScale device.videoZoomFactor = effectiveScale } catch { delegate?.cameraView(self, didFailWithError: error) } } func setTorch(on: Bool) { guard on != isTorchOn else { return } toggleTorch() } } // MARK: - AVCapturePhotoCaptureDelegate extension CustomCameraView: AVCapturePhotoCaptureDelegate { func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) 
{ if let error = error { delegate?.cameraView(self, didFailWithError: error) return } guard let imageData = photo.fileDataRepresentation() else { delegate?.cameraView(self, didFailWithError: CameraError.unknown) return } if let image = UIImage(data: imageData) { delegate?.cameraView(self, didCapturePhoto: image) } else { delegate?.cameraView(self, didFailWithError: CameraError.unknown) } } } // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate extension CustomCameraView: AVCaptureVideoDataOutputSampleBufferDelegate { // 在 CustomCameraView 中添加方法,将预览坐标转换为视频坐标 private func convertPreviewFrameToVideoFrame(previewFrame: CGRect) -> CGRect { guard let videoWidth = assetWriterInput?.outputSettings?[AVVideoWidthKey] as? CGFloat, let videoHeight = assetWriterInput?.outputSettings?[AVVideoHeightKey] as? CGFloat else { return previewFrame } // 预览层尺寸(即当前视图尺寸) let previewSize = bounds.size // 计算预览与视频的缩放比例 let scaleX = videoWidth / previewSize.width let scaleY = videoHeight / previewSize.height // 转换坐标和尺寸 return CGRect( x: previewFrame.origin.x * scaleX, y: previewFrame.origin.y * scaleY, width: previewFrame.width * scaleX, height: previewFrame.height * scaleY ) } // 将CMSampleBuffer转换为UIImage的方法(处理方向) func convertSampleBufferToUIImage(_ sampleBuffer: CMSampleBuffer) -> UIImage? { guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil } // 1. 创建 CIImage var ciImage = CIImage(cvPixelBuffer: imageBuffer) // 2. 获取视频连接的方向(优先使用连接的方向) var videoOrientation: AVCaptureVideoOrientation = .portrait if let connection = videoDataOutput?.connection(with: .video), connection.isVideoOrientationSupported { videoOrientation = connection.videoOrientation } else { // 如果没有连接信息,使用设备方向 switch UIDevice.current.orientation { case .portrait: videoOrientation = .portrait case .portraitUpsideDown: videoOrientation = .portraitUpsideDown case .landscapeLeft: videoOrientation = .landscapeRight case .landscapeRight: videoOrientation = .landscapeLeft default: videoOrientation = .portrait } } // 3. 应用方向修正 let orientation: CGImagePropertyOrientation switch (videoOrientation, currentCameraPosition) { case (.portrait, .back): orientation = .up case (.portrait, .front): orientation = .upMirrored case (.portraitUpsideDown, .back): orientation = .down case (.portraitUpsideDown, .front): orientation = .downMirrored case (.landscapeRight, .back): orientation = .right case (.landscapeRight, .front): orientation = .rightMirrored case (.landscapeLeft, .back): orientation = .left case (.landscapeLeft, .front): orientation = .leftMirrored default: orientation = .up } // 应用方向变换 ciImage = ciImage.oriented(forExifOrientation: Int32(orientation.rawValue)) // 4. 对于前置摄像头,可能需要额外的水平翻转 if currentCameraPosition == .front { ciImage = ciImage.transformed(by: CGAffineTransform(scaleX: -1, y: 1)) } // 5. 
转换为 UIImage(保持高质量) let context = CIContext(options: [ .useSoftwareRenderer: false, .highQualityDownsample: true, .workingColorSpace: CGColorSpaceCreateDeviceRGB() ]) guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil } // 创建 UIImage,确保方向正确 let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .up) return image } func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { guard isRecording else { return } // 存储最后一个样本缓冲区 self.lastSampleBuffer = sampleBuffer // 处理水印 guard let image = self.convertSampleBufferToUIImage(sampleBuffer), let watermarkView = self.watermarkView else { return } self.processedImageSize = image.size var targetFrame = CGRect.zero DispatchQueue.main.sync { targetFrame = convertPreviewFrameToVideoFrame( previewFrame: watermarkView .frame) } guard let watermarkedImage = image.addVideoWatermark(watermarkView, targetFrame: targetFrame) else { return } guard let pixelBuffer = self.imageToCVPixelBuffer(image: watermarkedImage) else { return } recordingQueue.async { [weak self] in guard let self = self, self.isRecording, let assetWriter = self.assetWriter, let assetWriterInput = self.assetWriterInput, let adaptor = self.assetWriterInputPixelBufferAdaptor else { return } let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) // 首次写入时启动会话(关键修复) if self.recordingStartTime == nil { self.recordingStartTime = timestamp assetWriter.startSession(atSourceTime: timestamp) } // 确保会话已启动且输入准备好 guard assetWriter.status == .writing, assetWriterInput.isReadyForMoreMediaData else { print("录制状态异常: \(assetWriter.status.rawValue), 输入准备状态: \(assetWriterInput.isReadyForMoreMediaData)") return } // 验证像素缓冲区尺寸 let bufferWidth = CVPixelBufferGetWidth(pixelBuffer) let bufferHeight = CVPixelBufferGetHeight(pixelBuffer) tlog.debug("像素缓冲区尺寸: \(bufferWidth)x\(bufferHeight)") // 追加像素缓冲区 if !adaptor.append(pixelBuffer, withPresentationTime: timestamp) { print("追加帧失败: \(assetWriter.error?.localizedDescription ?? "未知错误")") } } } // UIImage -> CVPixelBuffer (修正颜色和变形问题) func imageToCVPixelBuffer(image: UIImage) -> CVPixelBuffer? { let width = Int(image.size.width) let height = Int(image.size.height) let attrs = [ kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue, kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary ] as CFDictionary var pixelBuffer: CVPixelBuffer? 
let status = CVPixelBufferCreate( kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, attrs, &pixelBuffer ) guard status == kCVReturnSuccess, let buffer = pixelBuffer else { print("Failed to create pixel buffer, status: \(status)") return nil } CVPixelBufferLockBaseAddress(buffer, []) defer { CVPixelBufferUnlockBaseAddress(buffer, []) } // Write the image data into the pixel buffer directly guard let context = CGContext( data: CVPixelBufferGetBaseAddress(buffer), width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue ), let cgImage = image.cgImage else { return nil } context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height)) return buffer } } extension UIImage { // Create a UIImage from a CVPixelBuffer convenience init?(pixelBuffer: CVPixelBuffer) { // Lock the pixel buffer CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) } // Read the pixel buffer properties let width = CVPixelBufferGetWidth(pixelBuffer) let height = CVPixelBufferGetHeight(pixelBuffer) let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer) // Create the CGImage let colorSpace = CGColorSpaceCreateDeviceRGB() let bitmapInfo = CGBitmapInfo( rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue ) guard let context = CGContext( data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo.rawValue ), let cgImage = context.makeImage() else { return nil } // Initialize the UIImage self.init(cgImage: cgImage) } }

Please help me analyze this:

let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoWidthKey: 1280,
    AVVideoHeightKey: 720,
    AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
    AVVideoCompressionPropertiesKey: [
        AVVideoAverageBitRateKey: 20_000_000, // set directly to 20 Mbps for high quality
        AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
        AVVideoMaxKeyFrameIntervalKey: frameRate, // one keyframe every frameRate frames
        AVVideoAllowFrameReorderingKey: true,
        AVVideoExpectedSourceFrameRateKey: frameRate,
        AVVideoQualityKey: 0.9, // quality setting (0.0-1.0)
        AVVideoMaxKeyFrameIntervalDurationKey: 1, // maximum keyframe interval (seconds)
    ]
]

With these settings the picture is sharp, but the recorded video stutters during playback, the watermark I add does not show up, and only part of the frame is displayed.
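For reference, one observable mismatch in the quoted code is that the output settings hard-code 1280x720 while the AVAssetWriterInputPixelBufferAdaptor above is created with the computed videoWidth x videoHeight. Below is a minimal sketch of a settings helper that keeps the writer dimensions in step with the pixel buffers the sample-buffer callback actually appends; the function name and bitrate heuristic are illustrative assumptions, not a confirmed fix for the stutter or the missing watermark:

```swift
import AVFoundation

// Hypothetical helper: build H.264 writer settings from the same dimensions
// used for the AVAssetWriterInputPixelBufferAdaptor, instead of a fixed 1280x720.
func makeVideoSettings(width: Int, height: Int, frameRate: Int = 30) -> [String: Any] {
    // Rough heuristic (~0.15 bits per pixel per frame); tune for size vs. quality.
    let averageBitRate = Int(Double(width * height * frameRate) * 0.15)
    return [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: width,    // matches the appended pixel buffers
        AVVideoHeightKey: height,  // matches the appended pixel buffers
        AVVideoCompressionPropertiesKey: [
            AVVideoAverageBitRateKey: averageBitRate,
            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
            AVVideoExpectedSourceFrameRateKey: frameRate,
            AVVideoMaxKeyFrameIntervalKey: frameRate  // roughly one keyframe per second
        ]
    ]
}
```

In startRecording this could replace the literal dictionary, e.g. `let videoSettings = makeVideoSettings(width: videoWidth, height: videoHeight, frameRate: frameRate)`.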
Is the following a custom player? How can I use it in my controller? import Cocoa import TPBMDesignKit enum TPSightMediaPlayerType { case preview case playback } let TPSS_MP_FINISH_REASON_RELAY_TIMEOUT = 65 // error codes used by the upper layer start at 65; the C layer uses 0-64 class TPSightMediaPlayerController: TPBaseViewController/*, TPTouchViewDelegate*/ { // // MARK: TPTouchViewDelegate // func didTouched() { // print(">>>>> didTouched keeps getting called") // self.updateRelayPlayTimer() // } var playType : TPSightMediaPlayerType = .preview { didSet { playerPageManager.playType = playType } } var playerPageManager: TPGuardPlayerPageManager = TPGuardPlayerPageManager() var toolBar : TPPlayerToolBarViewController = TPPlayerToolBarViewController() var eventFilterView : TPGuardEventFilterView = TPGuardEventFilterView() var currentPage : TPSightMediaPlayerPage = TPSightMediaPlayerPage() var currentDSTInfo : TPSSDSTInfo? var pageControllerList : [TPSightMediaPlayerPageController] = [] { didSet { toolBar.totalPages = pageControllerList.count } } var currentPageViewController : TPSightMediaPlayerPageController? { didSet { if let currentPageViewController { pageContentView.addSubview(currentPageViewController.view) updateScollerViewConstraints() playerPageManager.startPlayer(for: currentPageIndex) currentPageViewController.refreshPlayer() toolBar.currentPageNumber = currentPageIndex } else { playerPageManager.stopAllPlayer() } if currentPageViewController != oldValue { oldValue?.view.removeFromSuperview() } // setupToolBar() } } var currentPageMode : TPSightMediaPlayerPageMode = .single { didSet { if currentPageMode != .autoFit { playerPageManager.adjustPageItem(for: currentPageMode, currentPage: currentPageIndex, selectItem: currentPageViewController?.selectPlayerItem) updatePageControllerList() // setupToolBar() firstEnterView.isHidden = true } } } var effectivePageMode : TPSightMediaPlayerPageMode = .single var originPageMode: TPSightMediaPlayerPageMode = .single var scrollView: NSView = NSView() var pageContentView : NSView = NSView() var firstEnterView: NSView = NSView() var firstEnterImageView: NSImageView = NSImageView() var firstEnterLabel: TPBLabel = TPBLabel() // view that listens for screen tap events // var touchView: TPTouchView? var eventMonitor: EventMonitor?
var windowConfigList : [TPSSWindowConfig] = [] private var deviceIDList : [TPSSDeviceId] = [] private var playerPageList: [[TPGuardPlayerItem]] = [[]] private var zoomPopover: NSPopover = NSPopover() private var popover = TPBPopover() private(set) var videoEditController = TPGuardVideoEditController() private var videoExportController = TPGuardVideoExportController() private var ptzViewController = TPGuardPTZViewController() private(set) var eventListController = TPGuardEventListController() let leftSideButton: TPBSideButton = TPBSideButton() var eventListHasHidden: Bool = false var eventListOriginFrame = NSRect.zero var playerItemList : [TPGuardPlayerItem] = [] var appContext = TPAppContextFactory.shared() var originFrame = NSRect.zero var isChangingFullScreenMode = false { didSet { pageControllerList.forEach({ $0.enablePopover = isChangingFullScreenMode }) } } var currentPageIndex: Int { if let currentPageViewController { return self.getPageIndex(for: currentPageViewController) } return 0 } private var inquireItemsRequestID: TPSSCode = -1 // 存对应播放器的有回放的日期 private var currentPlayerDateDictionary:[TPSSDeviceId:[Date]] = [:] override func viewDidLoad() { super.viewDidLoad() // //增加TouchView判断屏幕点击事件 // setupTouchView() setupToolBar() //添加鼠标键盘事件监听 eventMonitor = EventMonitor(eventMask: [.mouseEntered, .cursorUpdate, .mouseExited, .leftMouseDown, .leftMouseUp, .rightMouseDown, .rightMouseUp, .mouseMoved, .leftMouseDragged, .rightMouseDragged, .keyDown, .keyUp], handler: { [weak self] event in if let event = event { // print("Event received: \(event)") self?.updateRelayPlayTimer() } else { print("Global event monitor stopped.") } }) eventMonitor?.start() // NotificationCenter.default.addObserver(self, selector: #selector(appResignActive), name: NSApplication.didResignActiveNotification, object: nil) // NotificationCenter.default.addObserver(self, selector: #selector(appBecomeActive), name: NSApplication.willBecomeActiveNotification, object: nil) } deinit { eventMonitor?.stop() removeFastKey() }
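Regarding the question above: assuming TPSightMediaPlayerController behaves like a regular NSViewController (TPBaseViewController is project-specific and not shown in full) and needs no extra initializer parameters, one way to host it is standard child-view-controller containment. The host class name and layout code below are illustrative:

```swift
import Cocoa

final class MonitoringViewController: NSViewController {   // hypothetical host controller
    private let playerController = TPSightMediaPlayerController()

    override func viewDidLoad() {
        super.viewDidLoad()
        playerController.playType = .preview          // or .playback, per TPSightMediaPlayerType
        addChild(playerController)                    // standard NSViewController containment
        playerController.view.frame = view.bounds
        playerController.view.autoresizingMask = [.width, .height]
        view.addSubview(playerController.view)
    }
}
```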
import UIKit import CoreLocation import CoreBluetooth protocol PositionProviderDelegate: AnyObject { func didUpdate(position: Position) } class PositionProvider: NSObject, CLLocationManagerDelegate, CBCentralManagerDelegate { weak var delegate: PositionProviderDelegate? let userDefaults: UserDefaults var locationManager: CLLocationManager var lastLocation: CLLocation? var deviceId: String var interval: Double var distance: Double var angle: Double var startStopTimer: Timer? var beaconRegion: CLBeaconRegion? var centralManager: CBCentralManager? var isScanning = false var scanTimer: Timer? var keepAlive = false var pendingStart = false override init() { userDefaults = UserDefaults.standard deviceId = userDefaults.string(forKey: "device_id_preference")! interval = userDefaults.double(forKey: "frequency_preference") distance = userDefaults.double(forKey: "distance_preference") angle = userDefaults.double(forKey: "angle_preference") locationManager = CLLocationManager() super.init() locationManager.delegate = self locationManager.pausesLocationUpdatesAutomatically = false switch userDefaults.string(forKey: "accuracy_preference") ?? "medium" { case "high": locationManager.desiredAccuracy = kCLLocationAccuracyBestForNavigation case "low": locationManager.desiredAccuracy = kCLLocationAccuracyHundredMeters default: locationManager.desiredAccuracy = kCLLocationAccuracyBest } if #available(iOS 9.0, *) { locationManager.allowsBackgroundLocationUpdates = true } } func startUpdates(distance: Double) { locationManager.distanceFilter = distance locationManager.activityType = .other if (distance > 10.0) { locationManager.desiredAccuracy = kCLLocationAccuracyHundredMeters } else { switch userDefaults.string(forKey: "accuracy_preference") ?? "medium" { case "high": locationManager.desiredAccuracy = kCLLocationAccuracyBest case "low": locationManager.desiredAccuracy = kCLLocationAccuracyHundredMeters default: locationManager.desiredAccuracy = kCLLocationAccuracyNearestTenMeters } } if HXGPSController.shared.isSingleRealTimeLocation == false { keepAlive = distance > 10.0 } interval = userDefaults.double(forKey: "frequency_preference") if #available(iOS 14.0, *) { // iOS 14 及以上版本 switch locationManager.authorizationStatus { case .authorizedAlways: locationManager.startUpdatingLocation() startScanCycle() default: pendingStart = true locationManager.requestAlwaysAuthorization() } } else { // iOS 13 及以下版本 switch CLLocationManager.authorizationStatus() { case .authorizedAlways: locationManager.startUpdatingLocation() startScanCycle() default: pendingStart = true locationManager.requestAlwaysAuthorization() } } } func stopUpdates(isClearLastLocation: Bool = true) { locationManager.stopUpdatingLocation() if isClearLastLocation { lastLocation = nil } stopControlTimer() // 停止扫描循环 scanTimer?.invalidate() scanTimer = nil stopBeaconScan() } // 启动蓝牙扫描定时器 func startScanCycle() { scanTimer?.invalidate() if userDefaults.string(forKey: "bluetooth_open_status") == "0" { return } // 开始扫描 startBeaconScan() // 启动定时器,10秒后停止扫描 let gap = Double(userDefaults.string(forKey:"bluetooth_interval") ?? "10") ?? 10.0 scanTimer = Timer.scheduledTimer(withTimeInterval: gap + 3.0, repeats: true) { [weak self] timer in guard let self = self else { return } if self.isScanning { // 当前在扫描状态,停止扫描 self.stopBeaconScan() // 5秒后重新开始扫描 let bleDelay = Double(userDefaults.string(forKey:"bluetooth_duration") ?? "5") ?? 
5.0 DispatchQueue.main.asyncAfter(deadline: .now() + bleDelay + 3.0) { self.startBeaconScan() } } else { // 当前在停止状态,开始扫描 self.startBeaconScan() } } } // 启动蓝牙扫描 func startBeaconScan() { if !isScanning { let beaconConstraint = CLBeaconIdentityConstraint(uuid: UUID(uuidString: "FDA50693-A4E2-4FB1-AFCF-C6EB07647825")!) locationManager.startRangingBeacons(satisfying: beaconConstraint) isScanning = true } } // 停止蓝牙扫描 func stopBeaconScan() { if isScanning { let beaconConstraint = CLBeaconIdentityConstraint(uuid: UUID(uuidString: "FDA50693-A4E2-4FB1-AFCF-C6EB07647825")!) locationManager.stopRangingBeacons(satisfying: beaconConstraint) isScanning = false // 更新 UserDefaults 中的 ble 值 let bleValues = HXGPSController.shared.discoveredDevices.values.map { String(describing: $0) }.joined(separator: ",") userDefaults.set(bleValues, forKey: "ble") userDefaults.set(bleValues, forKey: "ble_for_check_in") } } // 启动定位启停控制定时器 func startControlTimer() { if (startStopTimer != nil) { startStopTimer?.invalidate(); } startStopTimer = Timer.scheduledTimer(timeInterval: TimeInterval(interval >= 0 && interval - 30 >= 30 ? interval - 30 : 30), target: self, selector: #selector(controlUpdatingLocation), userInfo: nil, repeats: true) } // 停止定位启停控制定时器 func stopControlTimer() { startStopTimer?.invalidate(); startStopTimer = nil } // 启停定时器回调 @objc func controlUpdatingLocation() { self.locationManager.stopUpdatingLocation() self.locationManager.distanceFilter = 0.0 self.locationManager.startUpdatingLocation() } // 撤回始终后台定位权限 func revokeAlwaysAuthorization() { // 撤回定位授权,引导用户进入设置页 if let url = URL(string: UIApplication.openSettingsURLString) { if UIApplication.shared.canOpenURL(url) { UIApplication.shared.open(url, options: [:], completionHandler: nil) } } } func getBatteryStatus() -> BatteryStatus { let device = UIDevice.current // NSLog("deviceinfo:\(device)") // NSLog("deviceBatteryState:\(device.batteryState)") // NSLog("deviceBatteryLevel:\(device.batteryLevel)") if device.batteryState != .unknown { return BatteryStatus( level: device.batteryLevel * 100, charging: device.batteryState == .charging || device.batteryState == .full ) } else { return BatteryStatus(level: 0, charging: true) } } func locationManager(_ manager: CLLocationManager, didChangeAuthorization status: CLAuthorizationStatus) { switch status { case .authorizedAlways, .authorizedWhenInUse: // 首次启动应用时不执行 if !HXGPSController.shared.isFirstStart { locationManager.requestAlwaysAuthorization() } if pendingStart { pendingStart = false locationManager.startUpdatingLocation() } default: // 首次启动应用时不执行 if !HXGPSController.shared.isFirstStart { locationManager.requestAlwaysAuthorization() } break } } func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) { // NSLog("locationinfo:\(locations)") // 设备未激活或者班次外不上传服务器 // let deActived = userDefaults.string(forKey: "device_is_actived") ?? "0" let deActived = "1" let stealthMode = userDefaults.string(forKey: "stealth_mode") ?? nil let classBeginEnd = userDefaults.string(forKey: "class_begin_end") ?? nil let chargeEvent = userDefaults.string(forKey: "charge_event") ?? nil let isLocation = userDefaults.string(forKey: "is_location"); // 处理flutter中获取实时位置 if HXGPSController.shared.isSingleRealTimeLocation { var tmpRealTimeLocation: CLLocation? 
if let realTimeLocation = locations.last { userDefaults.setValue("\(realTimeLocation.coordinate.latitude)&\(realTimeLocation.coordinate.longitude)", forKey: "camera_lat_lng") HXGPSController.shared.isSingleRealTimeLocation = false tmpRealTimeLocation = realTimeLocation } if (isLocation == "0" || keepAlive) { DispatchQueue.main.async { if let updateLastLocation = tmpRealTimeLocation { self.lastLocation = CLLocation( coordinate: updateLastLocation.coordinate, altitude: updateLastLocation.altitude, horizontalAccuracy: updateLastLocation.horizontalAccuracy, verticalAccuracy: updateLastLocation.verticalAccuracy, course: updateLastLocation.course, speed: updateLastLocation.speed, timestamp: Date() ) } self.locationManager.stopMonitoringSignificantLocationChanges() self.stopUpdates(isClearLastLocation: false) self.startUpdates(distance: 5000.0) } } } // 判断是否需要上报位置 if (deActived == "0" || isLocation == "0" || keepAlive || (stealthMode == "1" && classBeginEnd == nil && chargeEvent == nil)) { return } // 处理定位以及相关数据 if let location = locations.last { if lastLocation == nil || location.timestamp.timeIntervalSince(lastLocation!.timestamp) >= interval || (distance > 0 && DistanceCalculator.distance(fromLat: location.coordinate.latitude, fromLon: location.coordinate.longitude, toLat: lastLocation!.coordinate.latitude, toLon: lastLocation!.coordinate.longitude) >= distance) || (angle > 0 && fabs(location.course - lastLocation!.course) >= angle) { // 添加定位数据更新运行log if let locationUploadLog = userDefaults.string(forKey: "location_fresh_log") { userDefaults.setValue("\(locationUploadLog)\(HXGPSController.shared.getHHmmss())+", forKey: "location_fresh_log") } else { userDefaults.setValue("\(HXGPSController.shared.getHHmmss())+", forKey: "location_fresh_log") } let position = Position(managedObjectContext: DatabaseHelper().managedObjectContext) position.deviceId = deviceId let batteryStatus = getBatteryStatus() position.batt = Int(batteryStatus.level) as NSNumber position.charge = batteryStatus.charging as Bool position.gpsNumber = 12 as NSNumber position.gpsIntensity = 99 as NSNumber position.isHeartbeat = "1" as NSString position.valid = 1 as NSNumber let bleStr = userDefaults.string(forKey: "ble") ?? "" position.ble = bleStr as NSString let mainBundle = Bundle.main if let appVersion = mainBundle.infoDictionary?["CFBundleShortVersionString"] as? String, let _ = mainBundle.infoDictionary?["CFBundleVersion"] as? String { position.buildVersion = appVersion as NSString userDefaults.setValue(appVersion, forKey: "build_version") } else { position.buildVersion = "1.0" as NSString userDefaults.setValue("1.0", forKey: "build_version") } // 保存心跳数据 if (lastLocation == nil) { userDefaults.setValue(String(describing: Int(Date().timeIntervalSince1970)), forKey: "gps_time") } else { userDefaults.setValue(String(describing: Int(location.timestamp.timeIntervalSince1970)), forKey: "gps_time") } let tmpBatt = Int(batteryStatus.level) let tmpCharge = Bool(batteryStatus.charging) userDefaults.setValue(tmpBatt, forKey: "batt") userDefaults.setValue(tmpCharge ? 
1 : 0, forKey: "charge") userDefaults.setValue(location.horizontalAccuracy, forKey: "location_accuracy") userDefaults.setValue(location.course, forKey: "location_bearing") userDefaults.setValue(location.altitude, forKey: "location_altitude") if let step = userDefaults.string(forKey: "step_number") { position.step = step as NSString } if let state = userDefaults.string(forKey: "cstate") { position.cstate = state as NSString } if let isMove = userDefaults.string(forKey: "current_mode") { position.isMove = "\(isMove == "1" ? 1 : 0)" as NSString } if let isStealthMode = userDefaults.string(forKey: "stealth_mode") { position.isVisible = "\(isStealthMode)" as NSString } position.isCharge = "\(tmpCharge ? 1 : 0)" as NSString if let isTracking = userDefaults.string(forKey: "is_location") { position.isTracking = "\(isTracking)" as NSString } position.isReturn = "" as NSString // 保存当前定位速度 userDefaults.setValue(location.speed, forKey: "location_speed") let tmpLocation = CLLocation( coordinate: location.coordinate, altitude: location.altitude, horizontalAccuracy: location.horizontalAccuracy, verticalAccuracy: location.verticalAccuracy, course: location.course, speed: location.speed, timestamp: location.timestamp ) position.setLocation(tmpLocation) if (lastLocation == nil && Date().timeIntervalSince(location.timestamp) >= 5) { lastLocation = CLLocation( coordinate: location.coordinate, altitude: location.altitude, horizontalAccuracy: location.horizontalAccuracy, verticalAccuracy: location.verticalAccuracy, course: location.course, speed: location.speed, timestamp: Date() ) } else { lastLocation = location } delegate?.didUpdate(position: position) } } } func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) { } // CLBeacon逻辑 func locationManager(_ manager: CLLocationManager, didDetermineState state: CLRegionState, for region: CLRegion) { } func locationManager(_ manager: CLLocationManager, didEnterRegion region: CLRegion) { if let beaconRegion = region as? CLBeaconRegion { } } func locationManager(_ manager: CLLocationManager, didExitRegion region: CLRegion) { if let beaconRegion = region as? CLBeaconRegion { } } func locationManager(_ manager: CLLocationManager, didRangeBeacons beacons: [CLBeacon], in region: CLBeaconRegion) { if beacons.isEmpty { return } // 循环处理所有检测到的 beacon for beacon in beacons { // 检查 major 和 minor 是否符合条件 if beacon.major != nil && beacon.minor != nil && beacon.major.intValue != 1 && beacon.minor.intValue != 2 { // 将 major 拆分成高8位和低8位 let majorValue = UInt16(beacon.major.intValue) let majorHigh = (majorValue >> 8) & 0xFF let majorLow = majorValue & 0xFF // 将 minor 拆分成高8位和低8位 let minorValue = UInt16(beacon.minor.intValue) let minorHigh = (minorValue >> 8) & 0xFF let minorLow = minorValue & 0xFF // 用冒号拼接 let majorMinorStr = String(format: "%02X:%02X:%02X:%02X", majorHigh, majorLow, minorHigh, minorLow) // 拼接mac前缀 let bleIdentifier = "58:06:\(majorMinorStr)" var deviceInfo = [String: Any]() deviceInfo["timestamp"] = Int(Date().timeIntervalSince1970) * 1000 deviceInfo["rssi"] = 100 deviceInfo["mac"] = bleIdentifier deviceInfo["battery"] = 100 if let macAddress = deviceInfo["mac"] as? String { HXGPSController.shared.discoveredDevices[macAddress] = "\(macAddress);\(deviceInfo["rssi"] ?? 0);\(deviceInfo["timestamp"] ?? Int(Date().timeIntervalSince1970) * 1000);\(deviceInfo["battery"] ?? 
0)" } } } } // 蓝牙状态更新回调 func centralManagerDidUpdateState(_ central: CBCentralManager) { if central.state == .poweredOn { startScanCycle() } else { // 停止扫描循环 scanTimer?.invalidate() scanTimer = nil HXGPSController.shared.clearBleMap() userDefaults.removeObject(forKey: "ble") userDefaults.removeObject(forKey: "ble_for_check_in") stopBeaconScan() } } }