AVCaptureVideoDataOutputSampleBufferDelegate 中需要注意对 sampleBuffer 的使用。如果想将 sampleBuffer 保留下来以便后面使用,这里会出现一些问题:如果 sampleBuffer 被持有的时间太长,这个 delegate 就可能会停止被调用(一般你会发现怎么就只有 13 帧),这一点在文档中有明确的说明。普通的 copy 根本不起作用——CMSampleBufferCreateCopy 只是拷贝了一些基本属性,对于数据只是做了 retain 操作。所以唯一的办法就是将 data 进行 copy,然后使用 Create 系列函数重新创建 sampleBuffer,可以参考下面的代码。但是这里又会有一个问题:内存占用会非常大,所以对于 video 来说要谨慎使用。
/// Deep-copies a CMSampleBuffer so it can be held past the capture callback
/// without starving AVCaptureVideoDataOutput's fixed buffer pool (holding the
/// delegate's buffers too long stops delivery — you end up stuck at ~13 frames).
/// CMSampleBufferCreateCopy only retains the backing data, so the pixel bytes
/// are copied explicitly here. The copy owns its own pixel memory, which is
/// large for video frames — use sparingly.
///
/// @param sampleBuffer The buffer delivered to the capture delegate. May be NULL.
/// @return A new sample buffer following the Create rule (caller must CFRelease),
///         or NULL on failure.
- (CMSampleBufferRef)CMSampleBufferCreateCopyWithDeep:(CMSampleBufferRef)sampleBuffer {
    if (sampleBuffer == NULL) {
        return NULL;
    }

    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);

    // Per-sample timing info (two-call pattern: query count, then fetch).
    CMItemCount timingCount = 0;
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &timingCount);
    CMSampleTimingInfo *timingArray = NULL;
    if (timingCount > 0) {
        timingArray = malloc(sizeof(CMSampleTimingInfo) * timingCount);
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, timingCount, timingArray, &timingCount);
    }

    CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer);

    // Per-sample sizes (same two-call pattern).
    CMItemCount sizeCount = 0;
    CMSampleBufferGetSampleSizeArray(sampleBuffer, 0, NULL, &sizeCount);
    size_t *sizeArray = NULL;
    if (sizeCount > 0) {
        sizeArray = malloc(sizeof(size_t) * sizeCount);
        CMSampleBufferGetSampleSizeArray(sampleBuffer, sizeCount, sizeArray, &sizeCount);
    }

    CMSampleBufferRef sout = NULL;

    if (dataBuffer) {
        // Block-buffer-backed sample. NOTE(review): this path only retains the
        // block buffer — it is NOT a deep copy of the bytes (same limitation as
        // the original code); deep-copy the block buffer too if that matters.
        CMSampleBufferCreate(kCFAllocatorDefault,
                             dataBuffer,
                             true,        // dataReady
                             NULL, NULL,  // no make-data-ready callback
                             formatDescription,
                             sampleCount,
                             timingCount, timingArray,
                             sizeCount, sizeArray,
                             &sout);
    } else {
        // Pixel-buffer-backed sample: copy the raw pixel bytes into a fresh
        // CVPixelBuffer that owns its own memory, so nothing from the capture
        // pool is retained.
        CVImageBufferRef srcImage = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (srcImage) {
            CVPixelBufferLockBaseAddress(srcImage, kCVPixelBufferLock_ReadOnly);

            size_t width = CVPixelBufferGetWidth(srcImage);
            size_t height = CVPixelBufferGetHeight(srcImage);
            OSType pixelFormat = CVPixelBufferGetPixelFormatType(srcImage);
            size_t srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcImage);
            // NOTE(review): assumes a packed (non-planar) format such as BGRA —
            // for planar formats (e.g. NV12) the planes must be copied
            // individually; TODO confirm against the session's videoSettings.
            uint8_t *srcBase = (uint8_t *)CVPixelBufferGetBaseAddress(srcImage);

            CVPixelBufferRef dstImage = NULL;
            CVReturn cvErr = CVPixelBufferCreate(kCFAllocatorDefault,
                                                 width, height, pixelFormat,
                                                 NULL, &dstImage);
            if (cvErr == kCVReturnSuccess && srcBase != NULL) {
                CVPixelBufferLockBaseAddress(dstImage, 0);
                uint8_t *dstBase = (uint8_t *)CVPixelBufferGetBaseAddress(dstImage);
                size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(dstImage);
                // Rows may be padded differently in the new buffer; copy the
                // smaller stride row by row.
                size_t copyBytesPerRow = (srcBytesPerRow < dstBytesPerRow) ? srcBytesPerRow : dstBytesPerRow;
                for (size_t row = 0; row < height; row++) {
                    memcpy(dstBase + row * dstBytesPerRow,
                           srcBase + row * srcBytesPerRow,
                           copyBytesPerRow);
                }
                CVPixelBufferUnlockBaseAddress(dstImage, 0);

                // Carry over the first sample's timing so playback/write order
                // is preserved.
                CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
                CMSampleBufferGetSampleTimingInfo(sampleBuffer, 0, &timingInfo);

                CMVideoFormatDescriptionRef videoInfo = NULL;
                CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, dstImage, &videoInfo);
                if (videoInfo) {
                    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                       dstImage,
                                                       true,        // dataReady
                                                       NULL, NULL,  // no callback
                                                       videoInfo,
                                                       &timingInfo,
                                                       &sout);
                    // The new sample buffer retains the format description.
                    CFRelease(videoInfo);
                }
            }
            if (dstImage) {
                // The new sample buffer (if any) retains the pixel buffer.
                CFRelease(dstImage);
            }
            // Balance the lock taken above (missing in the original — fixed).
            CVPixelBufferUnlockBaseAddress(srcImage, kCVPixelBufferLock_ReadOnly);
        }
    }

    // free(NULL) is a no-op, so these are safe even when counts were 0.
    free(timingArray);
    free(sizeArray);
    return sout;
}
AVCaptureVideoDataOutputSampleBufferDelegate 注意点
原文:http://blog.csdn.net/whf727/article/details/18706095