CGImage 直接到 CVPixelBuffer

本文介绍了从image转换为CVPixelBuffer的高效方法。通过使用CVPixelBufferCreateWithBytes函数,避免了不必要的渲染和内存复制操作,显著提高了性能。文章提供了具体的实现代码,并对比了不同方法的性能表现。

摘要生成于 C知道 ,由 DeepSeek-R1 满血版支持, 前往体验 >

从 image 转换到 CVPixelBuffer 时需要注意性能:使用 CGContext 渲染与使用 memcpy 拷贝的开销相近,而 CVPixelBufferCreateWithBytes 可以把耗时降低几个数量级,因为它既不渲染也不拷贝内存,只是让像素缓冲直接引用已有的 data 指针。但也正因为只是引用指针,必须保证这块内存在 CVPixelBuffer 的整个生命周期内有效——应通过 releaseCallback 参数在缓冲销毁时再释放数据,而不能在创建后立即释放。

// CoreVideo release callback: balances the CGDataProviderCopyData() in
// -pixelBufferFaster. CoreVideo invokes it when the pixel buffer is
// destroyed, so the backing bytes stay valid for the buffer's whole lifetime.
static void GRReleasePixelBufferBackingData(void *releaseRefCon, const void *baseAddress)
{
    CFRelease((CFDataRef)releaseRefCon);
}

/// Wraps self.image's raw bitmap bytes in a CVPixelBuffer without rendering
/// or copying — only the data pointer is handed over, which is why this path
/// is orders of magnitude faster than drawing into a CGBitmapContext.
///
/// Returns a +1 retained CVPixelBufferRef (caller must CVPixelBufferRelease),
/// or NULL on failure.
///
/// NOTE(review): this assumes the CGImage's pixel layout really is 32ARGB
/// with the reported bytesPerRow — TODO confirm against the image source.
- (CVPixelBufferRef)pixelBufferFaster{
    
    CVPixelBufferRef pxbuffer = NULL;
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    
    size_t width =  CGImageGetWidth(self.image);
    size_t height = CGImageGetHeight(self.image);
    size_t bytesPerRow = CGImageGetBytesPerRow(self.image);

    // +1 copy of the image's backing store; ownership is transferred to the
    // pixel buffer via the release callback below.
    CFDataRef dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(self.image));
    if (dataFromImageDataProvider == NULL) {
        return NULL;
    }
    const UInt8 *imageData = CFDataGetBytePtr(dataFromImageDataProvider);
    
    // BUG FIX: the original passed NULL for the release callback and then
    // CFRelease'd the CFData immediately after creation, leaving the pixel
    // buffer pointing at freed memory (use-after-free on first access).
    // The callback now releases the data only when the buffer is destroyed.
    CVReturn status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                   width,
                                                   height,
                                                   kCVPixelFormatType_32ARGB,
                                                   (void *)imageData,
                                                   bytesPerRow,
                                                   GRReleasePixelBufferBackingData,
                                                   (void *)dataFromImageDataProvider,
                                                   (__bridge CFDictionaryRef)options,
                                                   &pxbuffer);
    if (status != kCVReturnSuccess) {
        // Creation failed, so the release callback will never fire — release here.
        CFRelease(dataFromImageDataProvider);
        return NULL;
    }

    return pxbuffer;

}

/// Renders self.image into a CVPixelBuffer drawn from `pixelBufferPool`
/// (falling back to a one-off CVPixelBufferCreate when the pool yields
/// nothing). This is the slow, copying path — see -pixelBufferFaster for
/// the zero-copy alternative.
///
/// Returns a +1 retained CVPixelBufferRef (caller must CVPixelBufferRelease),
/// or NULL on failure.
- (CVPixelBufferRef)pixelBufferFromCGImageWithPool:(CVPixelBufferPoolRef)pixelBufferPool
{

    CVPixelBufferRef pxbuffer = NULL;
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    
    size_t width =  CGImageGetWidth(self.image);
    size_t height = CGImageGetHeight(self.image);
    size_t bitsPerComponent = CGImageGetBitsPerComponent(self.image);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(self.image);
    
    if (pixelBufferPool == NULL)
        NSLog(@"pixelBufferPool is null!");
    
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, pixelBufferPool, &pxbuffer);
    if (pxbuffer == NULL) {
        status = CVPixelBufferCreate(kCFAllocatorDefault, width,
                                              height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                              &pxbuffer);
     }
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    // BUG FIX: draw with the pixel buffer's own stride, not the CGImage's
    // bytesPerRow — pool buffers are frequently row-padded, and a mismatched
    // stride skews or corrupts the rendered frame.
    size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width,
                                                 height, bitsPerComponent, bufferBytesPerRow,
                                                 rgbColorSpace, bitmapInfo);
    if (context != NULL) {
        // (Removed the original's no-op CGContextConcatCTM with a 0-radian
        // rotation, and the dead `if(1){...}else{memcpy...}` branch: the
        // memcpy path was unreachable and could overflow pxdata when the
        // image's data length exceeds the buffer's allocation.)
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), self.image);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(rgbColorSpace);

    // BUG FIX: the original returned the buffer still locked — every
    // CVPixelBufferLockBaseAddress must be balanced before handing the
    // buffer to consumers (encoders/GPU paths require an unlocked buffer).
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;

}


struct ContentView: View { @State private var player: AVPlayer? @State private var currentFrame: CVPixelBuffer? @State private var currentUIImage: UIImage? // ← 用 UIImage @State private var isProcessing = true var body: some View { VStack { if let uiImage = currentUIImage { Image(uiImage: uiImage) .resizable() .scaledToFit() .frame(width: 300, height: 300) } if let player = player { VideoPlayer(player: player) .frame(height: 300) } else { Text("加载视频中...") } } .onAppear(perform: processVideo) } private func processVideo() { // 自动获取视频路径 guard let videoURL = Bundle.main.url(forResource: "src_IMG_7212", withExtension: "mp4") else { print("错误:无法找到视频文件") // 调试信息 if let bundlePath = Bundle.main.resourcePath { let fileManager = FileManager.default do { let files = try fileManager.contentsOfDirectory(atPath: bundlePath) print("应用程序包中的文件:") files.forEach { print($0) } } catch { print("无法读取应用程序包内容: \(error)") } } return } let outputName = UUID().uuidString + "_stab.mp4" let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(outputName) Task { do { var stabilizer = VideoStabilizer(videoURL: videoURL, outputURL: outputURL) // stabilizer.onFrameProcessed = { [self] frame in // DispatchQueue.main.async { // self?.currentFrame = frame // } // } stabilizer.onFrameProcessed = { buffer in if let uiImage = buffer.toUIImage() { // ← 工具方法 DispatchQueue.main.async { self.currentUIImage = uiImage } } } //let stabilizer = VideoStabilizer(videoURL: videoURL, outputURL: outputURL) try await stabilizer.stabilize() DispatchQueue.main.async { print("视频处理完成:\(outputURL)") // 更新UI或执行其他操作 self.player = AVPlayer(url: outputURL) /* self.player = AVPlayer(url: outputURL) self.player?.play() self.isProcessing = false */ } } catch { DispatchQueue.main.async { print("视频处理失败:\(error)") self.isProcessing = false // 显示错误信息给用户 } } } } func stabilize() async throws { // 1. 
读取视频信息 /* let asset = AVURLAsset(url: videoURL) let tracks = try await asset.loadTracks(withMediaType: .video) guard let track = tracks.first else { throw NSError(domain: "VideoStabilizer", code: 1, userInfo: [NSLocalizedDescriptionKey: "No video track found"]) } */ //let videoInfo = try await getVideoInfo(from: asset) //let frameCount = videoInfo.frameCount //let size = videoInfo.size //let fps = videoInfo.fps // 2. 读取所有帧 //let frames = try await readAllFrames(from: asset, track: track) //print("Loaded \(frames.count) frames") logger.info("taggk----------1") // 2. 执行多线程帧读取 //let videoURL = URL(fileURLWithPath: "/path/to/video.mp4") //let totalFrames = calculateTotalFrames(url: videoURL) //logger.info("taggk----------2\(totalFrames)") //let frames = fast_video_read(url: videoURL, num_threads: 8) //let displayView = await FrameDisplayView(frame: CGRect(x: 0, y: 0, width: 300, height: 300)) let frames = await video_read(url: videoURL) //displayView.currentFrame = frames[1] //var frames = readAllFrames(from: asset, track: tracks) // 例如显示第一帧 let nonNilFrames = frames.compactMap { $0 } // 返回 [CVPixelBuffer] logger.info("taggk---------3") print("Loaded \(nonNilFrames.count) frames") 以上代码获取到CVPixelBuffer的视频帧后,如何显示图像用于调试
最新发布
07-23
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值