Recording a short video on iOS

Posted by 森雨豪 on 2021-02-24
  • Requirement

    Our company does hybrid development, and short-video capture on the uni-app side is far from ideal. To get a WeChat-like recording experience, we went with a native plugin.

  • Approach

    Step 1: one AVCaptureSession and one AVCaptureVideoPreviewLayer [consider swapping in AVPreView for compatibility]

    Step 2: recording needs both video & audio, so each needs its own AVCaptureDeviceInput, along with a matching AVCaptureVideoDataOutput and AVCaptureAudioDataOutput

    Step 3: in the delegate, tell video and audio apart by output and write the corresponding CMSampleBufferRefs into the video file

    Step 4: writing the file is done with AVAssetWriter; video & audio each need their own AVAssetWriterInput, added to the AVAssetWriter

    Step 5: CMSampleBufferRefs keep arriving and the AVAssetWriter keeps writing them, until recording stops

  • The code

    Step 1's initialization is not repeated here; you can find it in my earlier posts, and a rough sketch follows below.
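
    For completeness, here is a minimal sketch of what that setup typically looks like; the property names (session, previewLayer) and the preset are assumptions, not the exact code from the earlier post.

    // Rough sketch of step 1: capture session + preview layer (assumed property names)
    - (void)setupSession {
        self.session = [[AVCaptureSession alloc] init];
        if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            self.session.sessionPreset = AVCaptureSessionPresetHigh;
        }
        
        // Preview layer backed by the session, filling the container view
        self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        self.previewLayer.frame = self.view.bounds;
        [self.view.layer insertSublayer:self.previewLayer atIndex:0];
    }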

    Step 2: two AVCaptureDeviceInputs and two outputs, with the outputs' sample buffer delegates set

    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
    if (error) {
        NSLog(@"取得裝置攝入videoInput物件時出錯, 錯誤原因: %@", error);
        return;
    }
    
    // Add the video device input to the session
    if ([self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
    
    [self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }
    
    // Audio input & output
    AVCaptureDevice *adevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:adevice error:&error];
    
    if ([self.session canAddInput:self.audioInput]) {
        [self.session addInput:self.audioInput];
    }
    
    [self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
    if ([self.session canAddOutput:self.audioOutput]) {
        [self.session addOutput:self.audioOutput];
    }
    
    // Video output (lazy getter)
    - (AVCaptureVideoDataOutput *)videoOutput {
        if (!_videoOutput) {
            _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
            _videoOutput.alwaysDiscardsLateVideoFrames = YES;
        }
        return _videoOutput;
    }
    
    // Audio output (lazy getter)
    - (AVCaptureAudioDataOutput *)audioOutput {
        if (!_audioOutput) {
            _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        }
        return _audioOutput;
    }
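
    The device and self.videoQueue referenced above are not defined in the snippet; a minimal sketch of how they might be created (the queue label and the lazy getter are assumptions):

    // Default back camera used for the video input (assumed to match the device in step 1)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    
    // Serial queue shared by the video/audio sample buffer delegates (assumed lazy getter)
    - (dispatch_queue_t)videoQueue {
        if (!_videoQueue) {
            _videoQueue = dispatch_queue_create("com.demo.videoQueue", DISPATCH_QUEUE_SERIAL);
        }
        return _videoQueue;
    }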
    
    

    Step 3: start the session and handle the CMSampleBufferRefs in the delegate

    #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate
    - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
        @autoreleasepool {
            // Video samples
            if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]) {
                if (!self.manager.outputVideoFormatDescription) {
                    @synchronized(self) {
                        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                        self.manager.outputVideoFormatDescription = formatDescription;
                    }
                } else {
                    @synchronized(self) {
                        if (self.manager.state == StateRecording) {
                            [self.manager appendBuffer:sampleBuffer type:AVMediaTypeVideo];
                        }
                    }
                }
            }
            
            // Audio samples
            if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]) {
                if (!self.manager.outputAudioFormatDescription) {
                    @synchronized(self) {
                        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                        self.manager.outputAudioFormatDescription = formatDescription;
                    }
                }
                @synchronized(self) {
                    if (self.manager.state == StateRecording) {
                        [self.manager appendBuffer:sampleBuffer type:AVMediaTypeAudio];
                    }
                }
            }
        }
    }
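
    The snippet only shows the delegate; actually kicking off the capture is just a startRunning call, sketched below (the method name is an assumption, and it runs off the main thread because startRunning blocks until the session starts).

    // Start the capture session off the main thread; startRunning is a blocking call
    - (void)startSession {
        dispatch_async(self.videoQueue, ^{
            if (!self.session.isRunning) {
                [self.session startRunning];
            }
        });
    }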
    

    Step 4: the AVAssetWriter and its matching inputs

    // Create the AVAssetWriter
    self.writer = [AVAssetWriter assetWriterWithURL:_videoUrl fileType:AVFileTypeMPEG4 error:nil];
    
    _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
    // expectsMediaDataInRealTime must be YES because the data comes from the capture session in real time
    _videoInput.expectsMediaDataInRealTime = YES;
    
    _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:_audioSettings];
    _audioInput.expectsMediaDataInRealTime = YES;
    
    if ([_writer canAddInput:_videoInput]) {
        [_writer addInput:_videoInput];
    }
    if ([_writer canAddInput:_audioInput]) {
        [_writer addInput:_audioInput];
    }
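
    The _videoSettings and _audioSettings dictionaries above are not shown; a minimal sketch of what they might contain, with placeholder resolution, bitrate and sample rate values to tune per project:

    // Example video settings: H.264, 720x1280, ~2 Mbps, 30 fps (placeholder values)
    _videoSettings = @{
        AVVideoCodecKey: AVVideoCodecTypeH264,
        AVVideoWidthKey: @720,
        AVVideoHeightKey: @1280,
        AVVideoCompressionPropertiesKey: @{
            AVVideoAverageBitRateKey: @(2000 * 1024),
            AVVideoExpectedSourceFrameRateKey: @30
        }
    };
    
    // Example audio settings: AAC, mono, 44.1 kHz, 64 kbps (placeholder values)
    _audioSettings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey: @1,
        AVSampleRateKey: @44100,
        AVEncoderBitRateKey: @64000
    };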
    

    Step 5: the CMSampleBufferRefs from step 3 are written into the video file through the AVAssetWriter

    - (void)appendBuffer:(CMSampleBufferRef)buffer type:(NSString *)mediaType {
        if (buffer == NULL) {
            NSLog(@"empty sampleBuffer");
            return;
        }
        
        @synchronized (self) {
            if (self.state < StateRecording) {
                NSLog(@"not ready yet");
                return;
            }
        }
        
        CFRetain(buffer);
        dispatch_async(self.queue, ^{
            @autoreleasepool {
                @synchronized (self) {
                    if (self.state > StateFinish) {
                        CFRelease(buffer);
                        return;
                    }
                }
                
                if (!self.canWrite && mediaType == AVMediaTypeVideo) {
                    [self.writer startWriting];
                    [self.writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(buffer)];
                    self.canWrite = YES;
                }
                
                if(!self.timer) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        self.timer = [NSTimer scheduledTimerWithTimeInterval:TIMER_INTERVAL target:self selector:@selector(updateProgress) userInfo:nil repeats:YES];
                        [[NSRunLoop currentRunLoop] addTimer:self.timer forMode:NSDefaultRunLoopMode];
                    });
                }
                
                // Append video data
                if (mediaType == AVMediaTypeVideo) {
                    if (self.videoInput.readyForMoreMediaData) {
                        BOOL success = [self.videoInput appendSampleBuffer:buffer];
                        if (!success) {
                            @synchronized (self) {
                                [self stop:^{}];
                                [self destroy];
                            }
                        }
                    }
                }
                
                // Append audio data
                if (mediaType == AVMediaTypeAudio) {
                    if (self.audioInput.readyForMoreMediaData) {
                        BOOL success = [self.audioInput appendSampleBuffer:buffer];
                        if (!success) {
                            @synchronized (self) {
                                [self stop:^{}];
                                [self destroy];
                            }
                        }
                    }
                }
                CFRelease(buffer);
            }
        });
    }
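
    The stop: and destroy methods called above are not shown; a minimal sketch of what finishing the writer might look like, assuming the state values and the queue from the snippets above:

    // Rough sketch of stopping: mark both inputs finished and let the writer close the file
    - (void)stop:(void (^)(void))completion {
        @synchronized (self) {
            self.state = StateFinish;
        }
        dispatch_async(self.queue, ^{
            if (self.writer.status == AVAssetWriterStatusWriting) {
                [self.videoInput markAsFinished];
                [self.audioInput markAsFinished];
                [self.writer finishWritingWithCompletionHandler:^{
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (completion) completion();
                    });
                }];
            }
        });
    }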
    
    
  • Closing notes:

    1. When setting the AVAssetWriterInput's video properties, configure them to your own needs; the bitrate and frame rate in particular determine the quality and size of the recorded video, so pick values that match your project's requirements

    2. If the recorded video's orientation is wrong, there are three places you can adjust it (a short sketch follows the list)

      1. set videoOrientation on the preview layer's connection

      2. set videoOrientation on the AVCaptureOutput's connection

      3. set a transform on the video AVAssetWriterInput, e.g. a rotation of M_PI/2
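
    A minimal sketch of those three adjustments, using portrait as an example orientation (the preview layer and outputs are the ones assumed in steps 1 and 2):

    // 1. Orientation on the preview layer's connection
    self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    
    // 2. Orientation on the capture output's connection
    AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    
    // 3. Transform on the writer's video input, e.g. rotate by M_PI / 2
    _videoInput.transform = CGAffineTransformMakeRotation(M_PI / 2);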
