I'm trying to build an app that shows a camera preview, records it together with audio input under certain conditions, and finally plays the recorded video in a loop. How do I record and play back video with AVCaptureSession, AVAssetWriter, and AVPlayer?

I've already written classes for preview/recording/playback, plus a controller that coordinates them.

Each feature works perfectly when invoked on its own, but I can't get them to work together: when I play back the video, the sound starts immediately but the image takes about five seconds to appear.

Preview:

- (void) createSession 
{ 
    _session = [[AVCaptureSession alloc] init]; 

    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID]; 
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 

    NSError *error = nil; 
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain]; 
    if (!error) [_session addInput:_cVideoInput]; 

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 

    error = nil; 
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain]; 
    if (!error) [_session addInput:_cAudioInput]; 

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session]; 
    _cameraLayer.frame = self.bounds; 
    [self.layer addSublayer:_cameraLayer]; 

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; 
    [_session setSessionPreset:AVCaptureSessionPreset640x480]; 
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]]; 

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; 

    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL); 
    [_videoOutput setSampleBufferDelegate:self queue:queue]; 
    [_session addOutput:_videoOutput]; 

    [_audioOutput setSampleBufferDelegate:self queue:queue]; 
    [_session addOutput:_audioOutput]; 

    dispatch_set_context(queue, self); 
    dispatch_set_finalizer_f(queue, queue_finalizer); 

    dispatch_release(queue); 

    [_session startRunning]; 
} 
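
The queue_finalizer function isn't shown in the question. Given the dispatch_set_context call above, which stores self without retaining it, a minimal placeholder (the logging body is just an assumption) would be:

static void queue_finalizer(void *context) 
{ 
    // Runs once the capture queue is fully released. `self` was stored 
    // in the context unretained, so treat the pointer as notification-only 
    // here; never release it or assume the object is still alive. 
    NSLog(@"sample buffer queue finalized, context = %p", context); 
} 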

- (void) deleteSession 
{ 
    [_session stopRunning]; 

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil]; 
    [_cameraLayer removeFromSuperlayer]; 
    [_cameraLayer release]; 
    _cameraLayer = nil; 

    [_audioOutput setSampleBufferDelegate:nil queue:NULL]; 
    [_videoOutput setSampleBufferDelegate:nil queue:NULL]; 

    [_audioOutput release]; 
    _audioOutput = nil; 

    [_videoOutput release]; 
    _videoOutput = nil; 

    [_cAudioInput release]; 
    _cAudioInput = nil; 

    [_cVideoInput release]; 
    _cVideoInput = nil; 

    NSArray *inputs = [_session inputs]; 
    for (AVCaptureInput *input in inputs) 
     [_session removeInput:input]; 

    NSArray *outputs = [_session outputs]; 
    for (AVCaptureOutput *output in outputs) 
     [_session removeOutput:output]; 

    [_session release]; 
    _session = nil; 
} 

Recording:

- (void) createWriter 
{ 
    NSString *file = [self file]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:file]) [[NSFileManager defaultManager] removeItemAtPath:file error:NULL]; 

    NSError *error = nil; 
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain]; 

    if (error) 
    { 
     [_writer release]; 
     _writer = nil; 

     NSLog(@"%@", error); 
     return; 
    } 

    AudioChannelLayout acl; 
    bzero(&acl, sizeof(acl)); 
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; 

    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys: 
           [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey, 
           [NSNumber numberWithFloat:44100.0], AVSampleRateKey, 
           [NSNumber numberWithInt:1], AVNumberOfChannelsKey, 
           [NSNumber numberWithInt:64000], AVEncoderBitRateKey, 
           [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey, 
           nil ]; 

    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain]; 

    [_writer addInput:_wAudioInput]; 

    settings = [NSDictionary dictionaryWithObjectsAndKeys: 
       AVVideoCodecH264, AVVideoCodecKey, 
       [NSNumber numberWithInt:640], AVVideoWidthKey, 
       [NSNumber numberWithInt:480], AVVideoHeightKey, 
       nil]; 

    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain]; 

    [_writer addInput:_wVideoInput]; 
} 
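
canRecordBuffer: isn't shown either, but an AVAssetWriter must be started before the first append. A typical implementation, written here as an assumption about what that method does (the _isRecording flag mirrors the answer's code below), is:

- (BOOL) canRecordBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    if (!_writer || !_isRecording) return NO; 

    // Start the writer at the first buffer's timestamp so the recorded 
    // media lines up with the capture clock. 
    if (_writer.status == AVAssetWriterStatusUnknown) 
    { 
        [_writer startWriting]; 
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; 
    } 

    return _writer.status == AVAssetWriterStatusWriting; 
} 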

- (void) deleteWriter 
{ 
    [_wVideoInput release]; 
    _wVideoInput = nil; 

    [_wAudioInput release]; 
    _wAudioInput = nil; 

    [_writer release]; 
    _writer = nil; 
} 

- (void) RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    if (![self canRecordBuffer:sampleBuffer]) 
     return; 

    if ([_wAudioInput isReadyForMoreMediaData]) 
     [_wAudioInput appendSampleBuffer:sampleBuffer]; 
} 

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    if (![self canRecordBuffer:sampleBuffer]) 
     return; 

    if ([_wVideoInput isReadyForMoreMediaData]) 
     [_wVideoInput appendSampleBuffer:sampleBuffer]; 
} 
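
For completeness, the capture delegate callback that routes buffers into these two methods isn't shown; since both outputs share one queue and one delegate, it would look roughly like this:

- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    // Both outputs deliver on the same serial queue, so dispatch by type. 
    if (captureOutput == _videoOutput) 
        [self RecordingVideoWithBuffer:sampleBuffer]; 
    else if (captureOutput == _audioOutput) 
        [self RecordingAudioWithBuffer:sampleBuffer]; 
} 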

Playback:

- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context 
{ 
    AVPlayerItem *item = (AVPlayerItem *)object; 
    [item removeObserver:self forKeyPath:@"status"]; 

    switch (item.status) 
    { 
     case AVPlayerItemStatusReadyToPlay: 

      [_player seekToTime:kCMTimeZero]; 
      [_player play]; 

      [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item]; 
      break; 
     case AVPlayerItemStatusUnknown: 
     case AVPlayerItemStatusFailed: 
      break; 
     default: 
      break; 
    } 
} 

- (void) finishPlaying:(NSNotification *)notification 
{ 
    [_player pause]; 
    [_playerLayer removeFromSuperlayer]; 

    [_playerLayer release]; 
    _playerLayer = nil; 

    [_player release]; 
    _player = nil; 

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil]; 
} 

- (void) play:(NSString *)path 
{ 
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain]; 

    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain]; 
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1); 
    _playerLayer.frame = self.bounds; 
    [self.layer addSublayer:_playerLayer]; 

    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL]; 
} 
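
Since the goal is to loop the recorded video, note that finishPlaying: above tears the player down when the item ends. A loop only needs a seek back to zero; a hypothetical handler registered in place of finishPlaying: could be:

- (void) loopPlaying:(NSNotification *)notification 
{ 
    // Rewind and resume instead of releasing the player. 
    [_player seekToTime:kCMTimeZero]; 
    [_player play]; 
} 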

Answer:

I ran into a similar problem. Here is my code; while it didn't fix the issue completely, it helped a lot. Instead of just losing the sample buffer when the writer isn't ready, delay briefly and then re-evaluate whether you can write the data.

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    if (![self canRecordBuffer:sampleBuffer]) 
        return; 

    if (!_wVideoInput.readyForMoreMediaData && _isRecording) 
    { 
        // Retry shortly. Note the deferred call outlives the delegate 
        // callback, so the buffer must be kept alive (see the note below). 
        [self performSelector:@selector(RecordingVideoWithBuffer:) withObject:(__bridge id)(sampleBuffer) afterDelay:0.05]; 
        return; 
    } 

    [_wVideoInput appendSampleBuffer:sampleBuffer]; 
} 

If you're not using ARC, pass sampleBuffer on its own; under ARC the __bridge cast is required.

EDIT: I used performSelector rather than a while loop with an NSThread sleep, because blocking sucks.
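
One caveat with that retry: a buffer handed to the capture delegate is only guaranteed to stay valid for the duration of the callback, and performSelector:afterDelay: also needs a serviced run loop on the calling thread. A dispatch_after variant that retains the buffer explicitly is sketched below; _isRecording and canRecordBuffer: are the same assumed members as above.

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer 
{ 
    if (![self canRecordBuffer:sampleBuffer]) 
        return; 

    if (!_wVideoInput.readyForMoreMediaData && _isRecording) 
    { 
        CFRetain(sampleBuffer); // keep the buffer alive past the delegate callback 

        dispatch_time_t when = dispatch_time(DISPATCH_TIME_NOW, 50 * NSEC_PER_MSEC); 
        dispatch_after(when, dispatch_get_main_queue(), ^{ 
            [self RecordingVideoWithBuffer:sampleBuffer]; // re-evaluate readiness 
            CFRelease(sampleBuffer);                      // balance the retain above 
        }); 
        return; 
    } 

    [_wVideoInput appendSampleBuffer:sampleBuffer]; 
} 

In production the appends should still be funneled through a single queue; the main queue here is just for illustration.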
