- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @synchronized(self) {
        if (!self.isRecording || self.isPause) {
            return;
        }

        BOOL isVideo = YES;
        if (captureOutput == self.audioOutput) {
            isVideo = NO;
        }

        if (![self processSampleBuffer:sampleBuffer isVideo:isVideo]) {
            if ((sampleBuffer = [self processPartialRecord:sampleBuffer isVideo:isVideo])) {
                [self encodeFrame:sampleBuffer isVideo:isVideo];
                CFRelease(sampleBuffer);
            }
        }
    }
}

- (CMSampleBufferRef)processPartialRecord:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo
{
    if (_interrupted) {
        // After an interruption (pause), drop video frames until the first audio buffer
        // arrives, then measure how long the recording was paused.
        if (isVideo) {
            return nil;
        }
        _interrupted = NO;

        CMTime presentTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime last = isVideo ? _lastVideo : _lastAudio;
        if (CMTIME_IS_VALID(last)) {
            if (CMTIME_IS_VALID(_timeOffset)) {
                presentTimeStamp = CMTimeSubtract(presentTimeStamp, _timeOffset);
            }
            CMTime offset = CMTimeSubtract(presentTimeStamp, last);
            [self logCMTime:offset];
            // Accumulate the total paused duration across multiple pauses.
            if (_timeOffset.value == 0) {
                _timeOffset = offset;
            } else {
                _timeOffset = CMTimeAdd(_timeOffset, offset);
            }
        }
        _lastVideo.flags = 0;
        _lastAudio.flags = 0;
    }

    CFRetain(sampleBuffer);
    if (_timeOffset.value > 0) {
        // Shift the buffer's timestamps back by the accumulated pause duration.
        CFRelease(sampleBuffer);
        sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
    }

    // Remember where this buffer ends so the next pause can be measured from it.
    CMTime presentTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime duration = CMSampleBufferGetDuration(sampleBuffer);
    if (duration.value > 0) {
        presentTimeStamp = CMTimeAdd(presentTimeStamp, duration);
    }
    if (isVideo) {
        _lastVideo = presentTimeStamp;
    } else {
        _lastAudio = presentTimeStamp;
    }

    return sampleBuffer;
}

- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
{
    // Copy the sample buffer with all timing info shifted back by `offset`.
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++) {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
    free(pInfo);
    return sout;
}

- (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo
{
    if (CMSampleBufferDataIsReady(sampleBuffer)) {
        if (_writer.status == AVAssetWriterStatusUnknown) {
            // Start the writer session at the first buffer's timestamp.
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_writer startWriting];
            [_writer startSessionAtSourceTime:startTime];
        }
        if (_writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"error %@", _writer.error.localizedDescription);
            return NO;
        }
        if (isVideo) {
            if (self.videoWriterInput.readyForMoreMediaData) {
                [self.videoWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        } else {
            if (self.audioWriterInput.readyForMoreMediaData) {
                [self.audioWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
    }
    return NO;
}
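The snippet above does not show how _writer, videoWriterInput, and audioWriterInput are created. Below is a minimal setup sketch, assuming a QuickTime output file, H.264 video, and mono AAC audio; the method name setupWriterToURL:error: and the chosen settings are assumptions, not part of the original post.

#import <AVFoundation/AVFoundation.h>

// Hypothetical setup: creates the asset writer and inputs the delegate code appends to.
- (BOOL)setupWriterToURL:(NSURL *)outputURL error:(NSError **)error
{
    _writer = [AVAssetWriter assetWriterWithURL:outputURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:error];
    if (!_writer) {
        return NO;
    }

    // Video input: H.264 at 640x480 (assumed settings).
    NSDictionary *videoSettings = @{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @640,
        AVVideoHeightKey : @480
    };
    self.videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                               outputSettings:videoSettings];
    self.videoWriterInput.expectsMediaDataInRealTime = YES;

    // Audio input: mono AAC at 44.1 kHz (assumed settings).
    AudioChannelLayout layout = {0};
    layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioSettings = @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey : @1,
        AVSampleRateKey       : @44100.0,
        AVChannelLayoutKey    : [NSData dataWithBytes:&layout length:sizeof(layout)]
    };
    self.audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                               outputSettings:audioSettings];
    self.audioWriterInput.expectsMediaDataInRealTime = YES;

    [_writer addInput:self.videoWriterInput];
    [_writer addInput:self.audioWriterInput];
    return YES;
}

Likewise, the post does not show where isPause and _interrupted are toggled. A plausible pause/resume pair, assuming both flags are changed under the same lock the delegate uses, might look like this:

// Hypothetical pause/resume methods (not shown in the original post).
- (void)pauseRecording
{
    @synchronized(self) {
        self.isPause = YES;
        _interrupted = YES;   // first buffers after resume trigger the offset calculation
    }
}

- (void)resumeRecording
{
    @synchronized(self) {
        self.isPause = NO;
    }
}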
Original article: http://blog.csdn.net/whf727/article/details/18702643