Segmented recording means recording can be paused and then resumed, with everything landing in a single file when recording finishes. Both video and audio arrive as timestamped frames, and it is precisely these timestamps that let a player play them back in order. So for segmented recording, all we have to do is note the timestamp when pausing, compute the elapsed gap after resuming, and shift each frame's timestamp by that gap before writing it. The principle is simple, which makes the implementation correspondingly simple. The code says it all; if you need the details, read on.
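The pause and resume entry points aren't shown in this post, but the capture callback below depends on them. Here is a minimal sketch under my own assumptions: the method names pauseRecording/resumeRecording are made up, and I'm guessing they simply toggle the isPause property and the _interrupted ivar that the code below reads.

- (void)pauseRecording { // hypothetical name; not from the original post
    @synchronized(self) {
        if (self.isRecording) {
            self.isPause = YES;   // captureOutput: drops buffers while paused
            _interrupted = YES;   // tells processPartialRecord: to measure the gap on resume
        }
    }
}

- (void)resumeRecording { // hypothetical name; not from the original post
    @synchronized(self) {
        if (self.isRecording) {
            // Buffers flow again; _interrupted stays YES until the first
            // audio buffer after the pause updates _timeOffset.
            self.isPause = NO;
        }
    }
}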
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    @synchronized(self) {
        // Drop everything while we are not recording or are paused.
        if (!self.isRecording || self.isPause) {
            return;
        }
        // The video and audio data outputs share this delegate method;
        // tell them apart by the output that delivered the buffer.
        BOOL isVideo = (captureOutput != self.audioOutput);
        // processSampleBuffer:isVideo: (not shown here) gets first crack at the
        // buffer; the partial-record path only runs when it declines it.
        if (![self processSampleBuffer:sampleBuffer isVideo:isVideo]) {
            if ((sampleBuffer = [self processPartialRecord:sampleBuffer isVideo:isVideo])) {
                [self encodeFrame:sampleBuffer isVideo:isVideo];
                // processPartialRecord: hands back a +1 retained buffer.
                CFRelease(sampleBuffer);
            }
        }
    }
}
- (CMSampleBufferRef)processPartialRecord:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo {
    if (_interrupted) {
        // After a pause, drop video until the first audio buffer arrives;
        // the pause gap is measured on the audio track so the sound stays continuous.
        if (isVideo) {
            return nil;
        }
        _interrupted = NO;
        CMTime presentTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime last = isVideo ? _lastVideo : _lastAudio;
        if (CMTIME_IS_VALID(last)) {
            // Map the new buffer's timestamp into the already-shifted output
            // timeline before measuring the pause gap.
            if (CMTIME_IS_VALID(_timeOffset)) {
                presentTimeStamp = CMTimeSubtract(presentTimeStamp, _timeOffset);
            }
            // The gap between this buffer and the last one written is the pause length.
            CMTime offset = CMTimeSubtract(presentTimeStamp, last);
            [self logCMTime:offset];
            // Accumulate the offset across multiple pause/resume cycles.
            if (_timeOffset.value == 0) {
                _timeOffset = offset;
            } else {
                _timeOffset = CMTimeAdd(_timeOffset, offset);
            }
        }
        // Clearing the flags clears kCMTimeFlags_Valid, invalidating both timestamps.
        _lastVideo.flags = 0;
        _lastAudio.flags = 0;
    }
    CFRetain(sampleBuffer);
    if (_timeOffset.value > 0) {
        // Balance the retain just taken; the capture pipeline still owns the
        // original buffer for the duration of the callback, and adjustTime:by:
        // returns a fresh +1 copy with shifted timestamps.
        CFRelease(sampleBuffer);
        sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
    }
    // Remember where this track now ends (PTS plus duration, when one is set),
    // so the next resume can measure the pause gap from here.
    CMTime presentTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime duration = CMSampleBufferGetDuration(sampleBuffer);
    if (duration.value > 0) {
        presentTimeStamp = CMTimeAdd(presentTimeStamp, duration);
    }
    if (isVideo) {
        _lastVideo = presentTimeStamp;
    } else {
        _lastAudio = presentTimeStamp;
    }
    return sampleBuffer; // +1 retained; the caller releases it
}
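To make the offset arithmetic concrete, here is a worked trace with made-up numbers (a 600-unit timescale and a 2-second pause are assumptions for illustration):

// Illustrative numbers only: suppose the last audio buffer written before the
// pause ended at t = 5.0 s, and the first one after a 2 s pause arrives at 7.0 s.
CMTime last   = CMTimeMake(3000, 600);      // 5.0 s, what _lastAudio would hold
CMTime pts    = CMTimeMake(4200, 600);      // 7.0 s, the resumed buffer's PTS
CMTime offset = CMTimeSubtract(pts, last);  // {1200, 600} = the 2.0 s pause gap
// _timeOffset accumulates this gap, and adjustTime:by: subtracts it from every
// subsequent buffer, so the output file continues seamlessly from 5.0 s.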
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
    CMItemCount count;
    // First call asks for the number of timing entries; second call fills the array.
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    // Shift every PTS and DTS back by the accumulated pause offset.
    for (CMItemCount i = 0; i < count; i++) {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
    free(pInfo);
    return sout; // +1 retained; the caller releases it
}
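Note that CMSampleBufferCreateCopyWithNewTiming follows Core Foundation's Create rule, so the buffer returned by adjustTime:by: carries a +1 retain count; that is exactly the reference the CFRelease at the end of the capture callback balances.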
- (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo {
    if (CMSampleBufferDataIsReady(sampleBuffer)) {
        // Lazily start the writer on the first buffer; its (already offset-adjusted)
        // timestamp anchors the session that all later timestamps are relative to.
        if (_writer.status == AVAssetWriterStatusUnknown) {
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_writer startWriting];
            [_writer startSessionAtSourceTime:startTime];
        }
        if (_writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"error %@", _writer.error.localizedDescription);
            return NO;
        }
        // Drop the frame rather than block the capture queue when the input is busy.
        if (isVideo) {
            if (self.videoWriterInput.readyForMoreMediaData) {
                [self.videoWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        } else {
            if (self.audioWriterInput.readyForMoreMediaData) {
                [self.audioWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
    }
    return NO;
}
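For completeness, here is a sketch of the AVAssetWriter setup that encodeFrame: relies on. The file type, codec, dimensions, and audio settings below are my assumptions for illustration, not taken from the original post:

// A sketch of the writer setup encodeFrame: assumes; the concrete output
// settings and file URL here are assumptions, not from the original post.
- (BOOL)setupWriterWithURL:(NSURL *)url {
    NSError *error = nil;
    _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
    if (error) {
        return NO;
    }
    NSDictionary *videoSettings = @{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @720,
        AVVideoHeightKey : @1280,
    };
    self.videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                               outputSettings:videoSettings];
    // Capture delivers buffers in real time; don't let the input block.
    self.videoWriterInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:self.videoWriterInput];

    AudioChannelLayout acl = {0};
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioSettings = @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey : @1,
        AVSampleRateKey       : @44100,
        AVEncoderBitRateKey   : @64000,
        AVChannelLayoutKey    : [NSData dataWithBytes:&acl length:sizeof(acl)],
    };
    self.audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                               outputSettings:audioSettings];
    self.audioWriterInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:self.audioWriterInput];
    return YES;
}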