**********Basics:
CVPixelBufferRef is a pixel-buffer type belonging to the CoreVideo framework. Camera capture callbacks hand you a CMSampleBufferRef, and each CMSampleBufferRef contains a CVPixelBufferRef; hardware video decoding likewise returns a CVPixelBufferRef. CVPixelBufferRef is therefore the central intermediate data carrier tying together the iOS video capture, processing, and encoding pipeline, and understanding it helps you write fast, reliable video-processing code. Related topics worth studying: YUV, color range, and OpenGL.
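As a quick orientation, here is a minimal sketch of pulling the CVPixelBufferRef out of a camera CMSampleBufferRef and inspecting it; the delegate method is the standard AVCaptureVideoDataOutputSampleBufferDelegate callback, not code from this article's project:
#import <AVFoundation/AVFoundation.h>
// Sketch: inspect the CVPixelBufferRef inside a camera CMSampleBufferRef.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) return;
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // commonly kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange (NV12, video-range YUV)
    OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer);
    NSLog(@"buffer: %zux%zu, format: %u", width, height, (unsigned)format);
}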
1.********Extracting every frame of a GIF animation
#import <ImageIO/ImageIO.h>
#import <UIKit/UIKit.h>
// Extract every frame of the GIF animation
- (NSArray *)loadGifImageArr
{
    NSURL *fileUrl = [[NSBundle mainBundle] URLForResource:@"reapanimaet" withExtension:@"gif"];
    CGImageSourceRef gifSource = CGImageSourceCreateWithURL((__bridge CFURLRef)fileUrl, NULL);
    if (!gifSource) {
        return @[];
    }
    size_t gifCount = CGImageSourceGetCount(gifSource);
    NSMutableArray *frames = [[NSMutableArray alloc] init];
    for (size_t i = 0; i < gifCount; i++) {
        CGImageRef imageRef = CGImageSourceCreateImageAtIndex(gifSource, i, NULL);
        if (!imageRef) {
            continue;
        }
        UIImage *image = [UIImage imageWithCGImage:imageRef];
        [frames addObject:image];
        CGImageRelease(imageRef);
    }
    CFRelease(gifSource); // the image source follows the Create rule and must be released
    return frames;
}
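A minimal usage sketch for the extracted frames, assuming a fixed total animation duration (the real per-frame GIF delays can be read via CGImageSourceCopyPropertiesAtIndex with kCGImagePropertyGIFDictionary / kCGImagePropertyGIFDelayTime if accuracy matters):
UIImageView *gifView = [[UIImageView alloc] initWithFrame:self.view.bounds];
gifView.animationImages = [self loadGifImageArr];
gifView.animationDuration = 1.0; // assumed duration; read the real delays from the GIF
gifView.animationRepeatCount = 0; // 0 means loop forever
[self.view addSubview:gifView];
[gifView startAnimating];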
2.******Extracting every frame of a video
Call site:
NSString *path = [[NSBundle mainBundle] pathForResource:@"testvideo.mov" ofType:nil];
NSURL *url = [NSURL fileURLWithPath:path];
[self splitVideo:url fps:24 completedBlock:^{
}];
/**
 * Split a video file into images stored in an array
 *
 * @param fileUrl        URL of the local video file
 * @param fps            frame rate at which to split the video
 * @param completedBlock called once all frames have been extracted
 *
 * CMTime is the struct AVFoundation uses to represent media time.
 * value is not a number of seconds by itself; it is a tick count.
 * timescale is the number of ticks per second (when splitting at a given fps,
 * you can treat it as the frame rate).
 * Seconds are obtained as: value / timescale = seconds.
 * CMTimeMake(a, b) therefore means "tick a at b ticks per second"; with
 * timescale = fps, that is frame a at b frames per second.
 */
- (void)splitVideo:(NSURL *)fileUrl fps:(float)fps completedBlock:(void(^)(void))completedBlock {
    if (!fileUrl) {
        return;
    }
    QMWNWEAKSELF; // project weak-self macro
    // Ask for precise duration/timing so the zero tolerances below actually take effect
    NSDictionary *optDict = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
    AVURLAsset *avasset = [[AVURLAsset alloc] initWithURL:fileUrl options:optDict];
    CMTime cmtime = avasset.duration;                   // duration as a CMTime struct
    Float64 durationSeconds = CMTimeGetSeconds(cmtime); // total length in seconds
    NSMutableArray *times = [NSMutableArray array];
    Float64 totalFrames = durationSeconds * fps;        // total number of frames
    CMTime timeFrame;
    for (int i = 1; i <= totalFrames; i++) {
        timeFrame = CMTimeMake(i, (int32_t)fps);        // frame i at the given frame rate
        NSValue *timeValue = [NSValue valueWithCMTime:timeFrame];
        [times addObject:timeValue];
    }
    AVAssetImageGenerator *imgGenerator = [[AVAssetImageGenerator alloc] initWithAsset:avasset];
    // Zero tolerance so each returned frame matches the requested time exactly
    imgGenerator.requestedTimeToleranceBefore = kCMTimeZero;
    imgGenerator.requestedTimeToleranceAfter = kCMTimeZero;
    NSInteger timesCount = [times count];
    [imgGenerator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
        QMWNSTRONG; // project strong-self macro; yields strongS
        switch (result) {
            case AVAssetImageGeneratorCancelled:
                break;
            case AVAssetImageGeneratorFailed:
                break;
            case AVAssetImageGeneratorSucceeded: {
                UIImage *videoimage = [UIImage imageWithCGImage:image];
                [strongS.videoimageArr addObject:videoimage];
                // requestedTime.value is the frame index i, so the last requested
                // frame has value == timesCount (note: if that final frame fails,
                // this completion path is skipped)
                if (requestedTime.value == timesCount) {
                    NSLog(@"completed");
                    if (completedBlock) {
                        completedBlock();
                    }
                }
            }
                break;
        }
    }];
}
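If only a single frame is needed (a thumbnail, say), AVAssetImageGenerator also has a synchronous API; a minimal sketch, with the 5-second timestamp picked arbitrarily:
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:fileUrl options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.requestedTimeToleranceBefore = kCMTimeZero;
generator.requestedTimeToleranceAfter = kCMTimeZero;
NSError *error = nil;
CMTime actualTime;
// CMTimeMakeWithSeconds(5, 600): 5 seconds at the conventional 600 timescale
CGImageRef cgImage = [generator copyCGImageAtTime:CMTimeMakeWithSeconds(5, 600)
                                       actualTime:&actualTime
                                            error:&error];
if (cgImage) {
    UIImage *thumbnail = [UIImage imageWithCGImage:cgImage];
    NSLog(@"thumbnail size: %@", NSStringFromCGSize(thumbnail.size));
    CGImageRelease(cgImage); // the copy... API follows the Create rule; caller releases
}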
3.******Converting a set of images into a video
#import "ImageTovideovc.h"
#import <AVKit/AVKit.h>
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
@interface ImageTovideovc()
//未压缩的图片
@property(nonatomic,strong) NSMutableArray*imageArr;
//经过压缩的图片
@property(nonatomic,strong) NSMutableArray*imageArray;
//视频地址
@property(nonatomic,strong)NSString*theVideoPath;
//合成进度
@property(nonatomic,strong)UILabel *progresslabel;
@property(nonatomic,assign)NSInteger videowidth;//视频宽度
@property(nonatomic,assign)NSInteger videoheight;//视频高度
@property(nonatomic,assign)NSInteger videospeed;//播放速度,每秒多少帧
@end
@implementation ImageTovideovc
- (NSMutableArray *)imageArr {
    if (nil == _imageArr) {
        _imageArr = [[NSMutableArray alloc] init];
    }
    return _imageArr;
}
- (NSMutableArray *)imageArray {
    if (nil == _imageArray) {
        _imageArray = [[NSMutableArray alloc] init];
    }
    return _imageArray;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // keep any preset size, otherwise default to 320x480
    self.videowidth = self.videowidth ?: 320;
    self.videoheight = self.videoheight ?: 480;
    self.videospeed = 21;
    [self initDatas];
    [self initViews];
}
- (void)initViews {
    // "Compose Video" button
    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button setBounds:CGRectMake(0, 0, WIDTH * 0.25, 50)];
    button.center = CGPointMake(WIDTH * 0.25, WIDTH * 0.5);
    [button setTitle:@"Compose Video" forState:UIControlStateNormal];
    [button addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
    button.backgroundColor = [UIColor redColor];
    [self.view addSubview:button];
    // "Play Video" button
    UIButton *button1 = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button1 setBounds:CGRectMake(0, 0, WIDTH * 0.25, 50)];
    button1.center = CGPointMake(WIDTH * 0.75, WIDTH * 0.5);
    [button1 setTitle:@"Play Video" forState:UIControlStateNormal];
    [button1 addTarget:self action:@selector(playAction) forControlEvents:UIControlEventTouchUpInside];
    button1.backgroundColor = [UIColor redColor];
    [self.view addSubview:button1];
    // label showing composition/playback progress
    UILabel *lbe = [[UILabel alloc] init];
    lbe.frame = CGRectMake(0, 0, WIDTH * 0.25, 25);
    lbe.center = CGPointMake(WIDTH * 0.5, WIDTH * 0.15);
    lbe.textColor = [UIColor blackColor];
    lbe.textAlignment = NSTextAlignmentCenter;
    lbe.text = @"Ready";
    lbe.font = [UIFont systemFontOfSize:12];
    self.progresslabel = lbe;
    [self.view addSubview:lbe];
}
// Load the source data
- (void)initDatas {
    NSString *name = @"";
    UIImage *img = nil;
    // Prepare 22 images in advance, named 0.jpg through 21.jpg
    for (int i = 0; i < 22; i++) {
        name = [NSString stringWithFormat:@"%d", i];
        img = [UIImage imageNamed:name];
        if (img) { // skip missing assets instead of crashing on addObject:nil
            [self.imageArr addObject:img];
        }
    }
    // Scale the images so the composed video keeps a uniform aspect ratio
    for (int i = 0; i < self.imageArr.count; i++) {
        UIImage *imageNew = self.imageArr[i];
        // target size for each frame
        CGSize imgeSize = CGSizeMake(self.videowidth, self.videoheight);
        // scale the image down
        imageNew = [self imageWithImage:imageNew scaledToSize:imgeSize];
        [self.imageArray addObject:imageNew];
    }
}
// Scale an image to the given size
- (UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize
{
    // Create a new bitmap context of size newSize (scale factor 1.0)
    UIGraphicsBeginImageContext(newSize);
    // Draw the image stretched to the new size
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    // Grab the resized image back out of the current context
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
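On iOS 10 and later, UIGraphicsImageRenderer is the more modern way to do the same resize; a minimal equivalent sketch (the method name here is illustrative, not part of the original class):
// iOS 10+ alternative using UIGraphicsImageRenderer (scale forced to 1.0 so the
// pixel size matches the video dimensions exactly).
- (UIImage *)rendererScaledImage:(UIImage *)image toSize:(CGSize)newSize {
    UIGraphicsImageRendererFormat *format = [UIGraphicsImageRendererFormat defaultFormat];
    format.scale = 1.0;
    UIGraphicsImageRenderer *renderer =
        [[UIGraphicsImageRenderer alloc] initWithSize:newSize format:format];
    return [renderer imageWithActions:^(UIGraphicsImageRendererContext *ctx) {
        [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    }];
}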
// Create the directory for the video file (and remove any previous output)
- (NSString *)createVideopath {
    NSString *documentPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    // delete the previously generated file, if any
    if (self.theVideoPath && ![self.theVideoPath isEqualToString:@""]) {
        NSString *fileName = [self.theVideoPath componentsSeparatedByString:@"/"].lastObject;
        NSString *newsPath = [documentPath stringByAppendingPathComponent:fileName];
        [[NSFileManager defaultManager] removeItemAtPath:newsPath error:nil];
        self.theVideoPath = nil;
    }
    return documentPath;
}
// "Compose Video" button action
- (void)testCompressionSession {
    QMWNWEAKSELF;
    NSString *documentPath = [self createVideopath];
    // build the .mov output path
    NSString *moviePath = [documentPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", @"imagetovideo"]];
    self.theVideoPath = moviePath;
    // video size; keep it a multiple of the 320x480 base
    CGSize size = CGSizeMake(self.videowidth, self.videoheight);
    NSError *error = nil;
    // remove any file already at the output path (AVAssetWriter fails if one exists)
    unlink([moviePath UTF8String]);
    NSLog(@"path->%@", moviePath);
    // AVFoundation's AVAssetWriter writes images (and audio) out as a complete video file
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    if (error) {
        NSLog(@"error =%@", [error localizedDescription]);
        return;
    }
    // output settings for the .mov: codec, width, height
    NSDictionary *videoSettings = @{AVVideoCodecKey : AVVideoCodecTypeH264,
                                    AVVideoWidthKey : @((int)size.width),
                                    AVVideoHeightKey : @((int)size.height)};
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB)};
    // AVAssetWriterInputPixelBufferAdaptor exposes a CVPixelBufferPool from which
    // you can allocate the pixel buffers written to the output file. Allocating
    // from the provided pool is usually more efficient than creating buffers
    // through a separate pool of your own.
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput]) {
        NSLog(@"input can be added");
    } else {
        NSLog(@"input cannot be added");
        return;
    }
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // compose the images into a single video file
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0; // current frame index
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        QMWNSTRONG;
        while ([writerInput isReadyForMoreMediaData]) {
            if (++frame >= [strongS.imageArray count] * strongS.videospeed) {
                [writerInput markAsFinished];
                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"finished");
                    [[NSOperationQueue mainQueue] addOperationWithBlock:^{
                        QMWNSTRONG;
                        strongS.progresslabel.text = @"Video composition finished";
                        [strongS saveVideoToAlbumWith:moviePath]; // save to the photo album
                    }];
                }];
                break;
            }
            CVPixelBufferRef buffer = NULL; // pixel buffer for the current frame
            int idx = frame / strongS.videospeed; // frame index divided by fps gives the image index
            NSLog(@"image idx==%d", idx);
            NSString *progress = [NSString stringWithFormat:@"%.0f%%", 100.0 * idx / [strongS.imageArr count]];
            [[NSOperationQueue mainQueue] addOperationWithBlock:^{
                QMWNSTRONG;
                strongS.progresslabel.text = [NSString stringWithFormat:@"Progress: %@", progress];
            }];
            NSNumber *speedNum = [NSNumber numberWithInteger:strongS.videospeed];
            int speedvalue = [speedNum intValue];
            // turn the UIImage into a pixel buffer
            buffer = [strongS pixelBufferFromCGImage:[[strongS.imageArray objectAtIndex:idx] CGImage] size:size];
            if (buffer) {
                // each image is held for videospeed frames, i.e. one second apiece
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, speedvalue)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                CFRelease(buffer);
            }
        }
    }];
}
// Build a pixel buffer from a CGImage
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate sets up a bitmap drawing environment backed by the
    // memory block passed in; its pixel format is determined by the bits per
    // component, the color space, and the alpha option. Use the buffer's actual
    // bytes-per-row, which CoreVideo may pad beyond 4 * width.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    // Draw the image with CGContextDrawImage. Watch the orientation here: UIKit
    // and Core Graphics use opposite y-axes, so drawing a UIImage's CGImageRef
    // into a CGContext without a flip transform renders it upside down.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    // release the color space
    CGColorSpaceRelease(rgbColorSpace);
    // release the context
    CGContextRelease(context);
    // unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
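Note that this method follows Core Foundation's Create rule, so the returned buffer comes back with a +1 retain count; that is why the writer loop above calls CFRelease(buffer) after appending. A standalone usage sketch (someImage is a placeholder):
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:someImage.CGImage size:CGSizeMake(320, 480)];
if (buffer) {
    // ... hand the buffer to the adaptor, an encoder, etc. ...
    CVPixelBufferRelease(buffer); // balance the +1 from CVPixelBufferCreate
}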
// "Play Video" button action
- (void)playAction {
    NSLog(@"************%@", self.theVideoPath);
    NSFileManager *fileManager = [[NSFileManager alloc] init];
    if (![fileManager fileExistsAtPath:self.theVideoPath]) {
        self.progresslabel.text = @"File does not exist";
        return;
    }
    NSURL *sourceMovieURL = [NSURL fileURLWithPath:self.theVideoPath];
    AVAsset *movieAsset = [AVURLAsset URLAssetWithURL:sourceMovieURL options:nil];
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:movieAsset];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = CGRectMake(0, 0, WIDTH, HEIGHT);
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.view.layer addSublayer:playerLayer];
    [player play];
}
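Since AVKit is already imported, AVPlayerViewController is an alternative that provides system transport controls for free; a minimal sketch:
AVPlayer *player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:self.theVideoPath]];
AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
playerVC.player = player;
[self presentViewController:playerVC animated:YES completion:^{
    [player play];
}];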
// Save the video to the photo album
- (void)saveVideoToAlbumWith:(NSString *)urlStr {
    // if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(urlStr)) { // note: this check path also triggers the callback below once
    // core call that saves the video to the album
    UISaveVideoAtPathToSavedPhotosAlbum(urlStr, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    //}
}
#pragma mark Callback once the video has been saved
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error) {
        NSLog(@"Error while saving the video: %@", error.localizedDescription);
    } else {
        NSLog(@"Video saved successfully.");
        [self createVideopath]; // clear the sandbox path now that the video is saved
    }
}
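On current iOS versions the Photos framework is the recommended replacement for UISaveVideoAtPathToSavedPhotosAlbum; a minimal sketch (requires NSPhotoLibraryAddUsageDescription in Info.plist; the method name is illustrative):
#import <Photos/Photos.h>
// Photos-framework alternative for saving the finished video to the album.
- (void)saveVideoWithPhotos:(NSString *)urlStr {
    NSURL *fileURL = [NSURL fileURLWithPath:urlStr];
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:fileURL];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        NSLog(@"saved: %d, error: %@", success, error);
    }];
}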
4.Other conversions between video frames and images
1. Image to video frame (CVPixelBufferRef)
// image convert to pixel buffer (caller must release the returned buffer)
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                      size:(CGSize)imageSize
{
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
                                          imageSize.height, kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
                                                 imageSize.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer), // rows may be padded
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    // center the image inside the target buffer (letterboxed if the sizes differ)
    CGContextDrawImage(context, CGRectMake((imageSize.width - CGImageGetWidth(image)) / 2,
                                           (imageSize.height - CGImageGetHeight(image)) / 2,
                                           CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
2. Video frame (CVImageBufferRef) to image, via Core Image
// CVImageBufferRef to UIImage
- (UIImage *)screenshotOfVideoStream:(CVImageBufferRef)imageBuffer
{
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // Note: creating a CIContext is expensive; in production code create it
    // once and reuse it rather than building one per frame.
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(imageBuffer),
                                                 CVPixelBufferGetHeight(imageBuffer))];
    UIImage *image = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return image;
}
3. Sample buffer (CMSampleBufferRef) to image, via Core Graphics
// Works for BGRA (kCVPixelFormatType_32BGRA) sample buffers
+ (UIImage *)transformToImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // the base address must be locked before it is read
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(quartzImage);
    return image;
}