需求
公司采用混合开发,uni-app 端拍小视频的效果不够理想;为达到仿微信的拍摄效果,决定用原生插件实现。
思路
第1步:1个AVCaptureSession, 1块AVCaptureVideoPreviewLayer[如需兼容,可封装成以 AVCaptureVideoPreviewLayer 为 layerClass 的自定义预览 View]
第2步:视频录制需video & audio, 需要对应的AVCaptureDeviceInput,同理对应的AVCaptureVideoDataOutput与AVCaptureAudioDataOutput
第3步:代理中设置output区分video与audio, 并将对应的CMSampleBufferRef写入到视频文件中
第4步:写入视频文件中,用到AVAssetWriter, 对应video & audio 需两个AVAssetWriterInput, 加入AVAssetWriter
第5步:CMSampleBufferRef不断过来,AssetWriter不断写入,直到停止
上菜
第1步的初始化这里不再赘述,可以翻看本人前面的博客。
第2步:两个AVCaptureDeviceInput 两个Output, 且设置Output的代理
// --- Video input/output setup ---
// FIX: Cocoa convention is to check the method's RETURN VALUE, not the NSError
// out-parameter — the error pointer is only guaranteed meaningful on failure.
self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
if (!self.videoInput) {
    NSLog(@"取得设备摄入videoInput对象时出错, 错误原因: %@", error);
    return;
}
// Add the video device input to the session (canAddInput guards duplicates/conflicts).
if ([self.session canAddInput:self.videoInput]) {
    [self.session addInput:self.videoInput];
}
// Deliver video sample buffers to `self` on the dedicated serial capture queue.
[self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.videoOutput]) {
    [self.session addOutput:self.videoOutput];
}
// --- Audio input/output setup ---
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
error = nil; // reset before reuse so a stale error from above cannot leak through
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
// FIX: the original never checked the audio input at all; a denied microphone
// permission would silently produce a video-only recording.
if (!self.audioInput) {
    NSLog(@"Failed to create audio AVCaptureDeviceInput: %@", error);
    return;
}
if ([self.session canAddInput:self.audioInput]) {
    [self.session addInput:self.audioInput];
}
// Audio shares the same serial queue as video, so delegate callbacks are serialized.
[self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.audioOutput]) {
    [self.session addOutput:self.audioOutput];
}
// Lazily-created video data output; drops late frames to keep capture real-time.
- (AVCaptureVideoDataOutput *)videoOutput {
    if (_videoOutput == nil) {
        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        output.alwaysDiscardsLateVideoFrames = YES;
        _videoOutput = output;
    }
    return _videoOutput;
}
// Lazily-created audio data output.
- (AVCaptureAudioDataOutput *)audioOutput {
    if (_audioOutput == nil) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    }
    return _audioOutput;
}
第3步:启动Session,代理里面操作CMSampleBufferRef
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate

/// Called on the capture queue for every video/audio sample buffer.
/// The first buffer of each media type is used only to record the stream's
/// CMFormatDescription; subsequent buffers are appended while recording.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool {
        // Video path: identify the connection by media type.
        if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]) {
            if (!self.manager.outputVideoFormatDescription) {
                @synchronized(self) {
                    // NOTE(review): CMSampleBufferGetFormatDescription follows the
                    // CF "Get" rule (not owned) — the manager's property must retain
                    // it if it outlives this callback; confirm the property semantics.
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputVideoFormatDescription = formatDescription;
                }
            } else {
                @synchronized(self) {
                    if (self.manager.state == StateRecording) {
                        [self.manager appendBuffer:sampleBuffer type:AVMediaTypeVideo];
                    }
                }
            }
        }
        // Audio path.
        if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]) {
            if (!self.manager.outputAudioFormatDescription) {
                @synchronized(self) {
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputAudioFormatDescription = formatDescription;
                }
            } else {
                // FIX: mirror the video path — the original had no `else`, so the
                // very first audio buffer was appended in the same pass that
                // captured its format description, before setup was complete.
                @synchronized(self) {
                    if (self.manager.state == StateRecording) {
                        [self.manager appendBuffer:sampleBuffer type:AVMediaTypeAudio];
                    }
                }
            }
        }
    }
}
第4步:AVAssetWriter以及对应的Input
// --- AVAssetWriter + writer-input setup ---
// FIX: capture and check the NSError instead of passing nil — a failed writer
// creation would otherwise fail silently (messages to nil are no-ops) and every
// later append would be dropped without diagnosis.
NSError *writerError = nil;
self.writer = [AVAssetWriter assetWriterWithURL:_videoUrl fileType:AVFileTypeMPEG4 error:&writerError];
if (!self.writer) {
    NSLog(@"Failed to create AVAssetWriter: %@", writerError);
}
_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
// expectsMediaDataInRealTime must be YES when fed live from a capture session.
_videoInput.expectsMediaDataInRealTime = YES;
_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:_audioSettings];
_audioInput.expectsMediaDataInRealTime = YES;
if ([_writer canAddInput:_videoInput]) {
    [_writer addInput:_videoInput];
}
if ([_writer canAddInput:_audioInput]) {
    [_writer addInput:_audioInput];
}
第5步:第3步的CMSampleBufferRef通过AVAssetWriter写入到视频文件中
/// Appends one sample buffer to the asset writer on the writer's serial queue.
/// @param buffer The captured sample buffer; retained across the async hop and
///               released inside the block on every exit path.
/// @param mediaType AVMediaTypeVideo or AVMediaTypeAudio.
- (void)appendBuffer:(CMSampleBufferRef)buffer type:(NSString *)mediaType {
    if (buffer == NULL) {
        NSLog(@"empty sampleBuffer");
        return;
    }
    @synchronized (self) {
        if (self.state < StateRecording) {
            NSLog(@"not ready yet");
            return;
        }
    }
    // Keep the buffer alive across the async dispatch; released in the block.
    CFRetain(buffer);
    dispatch_async(self.queue, ^{
        @autoreleasepool {
            @synchronized (self) {
                if (self.state > StateFinish) {
                    CFRelease(buffer);
                    return;
                }
            }
            // FIX: compare NSString media types by value — `mediaType ==
            // AVMediaTypeVideo` relies on pointer identity of constants, which
            // is fragile (e.g. for a copied or bridged string).
            BOOL isVideo = [mediaType isEqualToString:AVMediaTypeVideo];
            // Start the writer session on the first VIDEO frame so the file's
            // timeline begins with a visible frame (avoids a black lead-in).
            if (!self.canWrite && isVideo) {
                [self.writer startWriting];
                [self.writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(buffer)];
                self.canWrite = YES;
            }
            // Lazily start the progress timer on the main run loop.
            if (!self.timer) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (self.timer) { return; } // guard against double creation from racing buffers
                    // NOTE(review): a repeating target-based NSTimer retains self;
                    // it must be invalidated before teardown or dealloc never runs.
                    // FIX: the original used scheduledTimer… (already registered)
                    // AND re-added the timer to the run loop; create it
                    // unscheduled and register it exactly once.
                    self.timer = [NSTimer timerWithTimeInterval:TIMER_INTERVAL target:self selector:@selector(updateProgress) userInfo:nil repeats:YES];
                    [[NSRunLoop currentRunLoop] addTimer:self.timer forMode:NSDefaultRunLoopMode];
                });
            }
            // Append to the matching writer input when it can accept more data;
            // on append failure, stop and tear down the recording.
            if (isVideo) {
                if (self.videoInput.readyForMoreMediaData) {
                    if (![self.videoInput appendSampleBuffer:buffer]) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            } else if ([mediaType isEqualToString:AVMediaTypeAudio]) {
                if (self.audioInput.readyForMoreMediaData) {
                    if (![self.audioInput appendSampleBuffer:buffer]) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            }
            CFRelease(buffer);
        }
    });
}
写在末尾:
AVAssetWriterInput设置视频属性时,按照自己的需要设计,其中码率与帧率的设置会影响到拍摄后视频的质量与大小,具体看各自项目的要求
如果视频视角存在问题,可以从三个方向入手调整
1.layer的connect设置下videoOrientation
2.AVCaptureOutput的connect设置下videoOrientation
3.AVAssetWriterInput针对video是设置下transform,比如Rotation M_PI/2 角度
原文:https://www.cnblogs.com/shanzhaishuwu/p/14443499.html