
I have been trying to put together iOS 7 code for capturing video frames, based on this Apple Q&A: Apple Q&A 1702, sample code for capturing frames from video, updated for iOS 7.0.

So far I think I have made the needed replacements, but I can't run the code because I get a 'no visible @interface for setSession' error on this line:

// Assign session to an ivar. 
[self setSession:session]; 
I also think I have replaced the deprecated minFrameDuration code correctly with the new iOS 7 calls:

// If you wish to cap the frame rate to a known value, such as 2 fps, set minFrameDuration. 
//output.minFrameDuration = CMTimeMake(1, 2); //deprecated 
[device setActiveVideoMinFrameDuration:CMTimeMake(1, 2)]; //new in IOS7 
[device setActiveVideoMaxFrameDuration:CMTimeMake(1, 2)]; 

but I'm not certain that replacement is right.
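As far as I can tell from the AVCaptureDevice documentation, activeVideoMinFrameDuration and activeVideoMaxFrameDuration may only be set while the device configuration is locked, so a safer version of that replacement is probably something like this (just a sketch, not verified against the sample):

    NSError *lockError = nil;
    if ([device lockForConfiguration:&lockError]) {
        // A frame duration of 1/2 second caps capture at 2 fps.
        device.activeVideoMinFrameDuration = CMTimeMake(1, 2);
        device.activeVideoMaxFrameDuration = CMTimeMake(1, 2);
        [device unlockForConfiguration];
    } else {
        // Inspect lockError here.
    }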

I also get an incompatible pointer types warning on this line:

[output setSampleBufferDelegate:self queue:queue]; 

I assume there is a simple answer to this error and warning, but after a lot of searching I'm stuck. Can anyone see why I'm getting an error and a warning straight out of Apple's sample code? Thanks, Carmen
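I gather that the 'no visible @interface' error just means my view controller never declares a session property or ivar, so there is no setSession: for the sample code to call; a strong reference like that would presumably also be what keeps the session alive under ARC after setupCaptureSession returns. Something like the following is probably what Apple assumes (the class name here is just a placeholder):

    @interface MyViewController ()

    // Declaring the property synthesizes a setSession: setter and a strong ivar
    // that keeps the capture session alive for the lifetime of the controller.
    @property (nonatomic, strong) AVCaptureSession *session;

    @end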

[EDIT1] I don't think my fix for the deprecated minFrameDuration is quite right, and it may be that the setSession call isn't even needed. I'm just trying to piece together cut-and-paste code that captures images from video at a specified frame rate in an iOS 7 app; it doesn't need to run on anything below 7.0. For now I'm temporarily putting each captured image into a UIImageView in the storyboard.

This shouldn't be hard.

Here is the full code section so far. But when I call setupCaptureSession from viewDidLoad, the captureOutput method never fires, and the incompatible types warning mentioned above is still there. Can anyone see what's wrong (still a beginner, so be gentle):

// Create and configure a capture session and start it running 
- (void)setupCaptureSession 
{ 
    NSError *error = nil; 

    // Create the session 
    AVCaptureSession *session = [[AVCaptureSession alloc] init]; 

    // Configure the session to produce high-resolution video frames 
    session.sessionPreset = AVCaptureSessionPresetHigh; 

    // Find a suitable AVCaptureDevice 
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 

    // Create a device input with the device and add it to the session. 
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device 
                           error:&error]; 
    if (!input) { 
     // Handle the error appropriately. 
    } 
    [session addInput:input]; 

    // Create a VideoDataOutput and add it to the session 
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; 
    [session addOutput:output]; 

    // Configure your output. 
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL); 
    [output setSampleBufferDelegate:self queue:queue]; 

    // Specify the pixel format 
    output.videoSettings = 
    [NSDictionary dictionaryWithObject: 
    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
             forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 


    // If you wish to cap the frame rate to a known value, such as 2 fps, set 
    // minFrameDuration. 
    //output.minFrameDuration = CMTimeMake(1, 2); //deprecated 


    //AVCaptureConnection *conn = [output connectionWithMediaType:AVMediaTypeVideo]; 

    //if (conn.supportsVideoMinFrameDuration) // these are deprecated in IOS7 
    // conn.videoMinFrameDuration = CMTimeMake(1,2); 
    //if (conn.supportsVideoMaxFrameDuration) 
    // conn.videoMaxFrameDuration = CMTimeMake(1,2); 

    NSError *error2; 
    [device lockForConfiguration:&error2]; 
    if (error2 == nil) { 
     if (device.activeFormat.videoSupportedFrameRateRanges){ 
      [device setActiveVideoMinFrameDuration:CMTimeMake(1, 2)]; 
      [device setActiveVideoMaxFrameDuration:CMTimeMake(1, 2)]; 
     }else{ 
      //handle condition 
     } 
    }else{ 
     // handle error2 
    } 
    [device unlockForConfiguration]; 


    // Start the session running to start the flow of data 
    [session startRunning]; 

    // Assign session to an ivar. 
    //[self setSession:session]; //not sure why need this, can't find code that works for it 
} 

- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
     fromConnection:(AVCaptureConnection *)connection 
{ 
    NSLog(@"captureOutput: didOutputSampleBufferFromConnection"); 

    // Create a UIImage from the sample buffer data 
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer]; 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     //< Add your code here that uses the image > 
     [self.imageView setImage:image]; 
     [self.view setNeedsDisplay];} 
     ); 
} 

// Create a UIImage from sample buffer data 
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer 
{ 
    // Get a CMSampleBuffer's Core Video image buffer for the media data 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    // Lock the base address of the pixel buffer 
    CVPixelBufferLockBaseAddress(imageBuffer, 0); 

    // Get the base address of the pixel buffer 
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 

    // Get the number of bytes per row for the pixel buffer 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    // Get the pixel buffer width and height 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 

    // Create a device-dependent RGB color space 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    // Create a bitmap graphics context with the sample buffer data 
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, 
                   bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    // Create a Quartz image from the pixel data in the bitmap graphics context 
    CGImageRef quartzImage = CGBitmapContextCreateImage(context); 
    // Unlock the pixel buffer 
    CVPixelBufferUnlockBaseAddress(imageBuffer,0); 

    // Free up the context and color space 
    CGContextRelease(context); 
    CGColorSpaceRelease(colorSpace); 

    // Create an image object from the Quartz image 
    UIImage *image = [UIImage imageWithCGImage:quartzImage]; 

    // Release the Quartz image 
    CGImageRelease(quartzImage); 

    return (image); 
} 

Answer


So the crux of this problem turned out to be Apple's instruction to 'assign session to an ivar', which calls a setSession method they never actually provide the code for.

I'm a newbie, and because I'm using ARC I was hoping I wouldn't need to understand synthesize/ivars, but apparently I still do.

In any case, here is code that works as basic capture code (my whole view controller). Basically, I declared the session right after the import line in my viewController.m.

Perhaps someone can explain exactly what I did, but for now I'm calling it solved (the incompatible types warning mentioned in the question is still there):

#import "CJKViewController.h" 

AVCaptureSession *session; 

@interface CJKViewController() 

@end 

@implementation CJKViewController 

- (void)viewDidLoad 
{ 
    [super viewDidLoad]; 
    // Do any additional setup after loading the view, typically from a nib. 
    // Create the session 

    [self setupCaptureSession]; 
} 

- (void)didReceiveMemoryWarning 
{ 
    [super didReceiveMemoryWarning]; 
    // Dispose of any resources that can be recreated. 
} 







// Create and configure a capture session and start it running 
- (void)setupCaptureSession 
{ 
    NSError *error = nil; 

// AVCaptureSession *session = [[AVCaptureSession alloc] init]; 
    session = [[AVCaptureSession alloc] init]; 

    // Configure the session to produce the desired resolution for video frames. 
    // Here the high-quality preset is used for the chosen device. 
    session.sessionPreset = AVCaptureSessionPresetHigh; 

    // Find a suitable AVCaptureDevice 
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 

    // Create a device input with the device and add it to the session. 
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device 
                           error:&error]; 
    if (!input) { 
     // Handle the error appropriately. 
    } 
    [session addInput:input]; 

    // Create a VideoDataOutput and add it to the session 
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; 
    [session addOutput:output]; 

    // Configure your output. 
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL); 
    [output setSampleBufferDelegate:self queue:queue]; 

    // Specify the pixel format 
    output.videoSettings = 
    [NSDictionary dictionaryWithObject: 
    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
             forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 


    // If you wish to cap the frame rate to a known value, such as 2 fps, set 
    // minFrameDuration. 
    //output.minFrameDuration = CMTimeMake(1, 2); //deprecated 


    //AVCaptureConnection *conn = [output connectionWithMediaType:AVMediaTypeVideo]; 

    //if (conn.supportsVideoMinFrameDuration) 
    // conn.videoMinFrameDuration = CMTimeMake(1,2); 
    //if (conn.supportsVideoMaxFrameDuration) 
    // conn.videoMaxFrameDuration = CMTimeMake(1,2); 

    NSError *error2; 
    [device lockForConfiguration:&error2]; 
    if (error2 == nil) { 
     if (device.activeFormat.videoSupportedFrameRateRanges){ 
      [device setActiveVideoMinFrameDuration:CMTimeMake(1, 2)]; 
      [device setActiveVideoMaxFrameDuration:CMTimeMake(1, 2)]; 
     } 
    }else{ 
     // handle error2 
    } 
    [device unlockForConfiguration]; 


    // Start the session running to start the flow of data 
    [session startRunning]; 

    // Assign session to an ivar. 
    //[self setSession:session]; 
} 

- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
     fromConnection:(AVCaptureConnection *)connection 
{ 
    NSLog(@"captureOutput: didOutputSampleBufferFromConnection"); 

    // Create a UIImage from the sample buffer data 
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer]; 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     //< Add your code here that uses the image > 
     [self.imageView setImage:image]; 
     [self.view setNeedsDisplay];} 
     ); 
} 

// Create a UIImage from sample buffer data 
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer 
{ 
    // Get a CMSampleBuffer's Core Video image buffer for the media data 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    // Lock the base address of the pixel buffer 
    CVPixelBufferLockBaseAddress(imageBuffer, 0); 

    // Get the base address of the pixel buffer 
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 

    // Get the number of bytes per row for the pixel buffer 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    // Get the pixel buffer width and height 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 

    // Create a device-dependent RGB color space 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    // Create a bitmap graphics context with the sample buffer data 
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, 
                   bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    // Create a Quartz image from the pixel data in the bitmap graphics context 
    CGImageRef quartzImage = CGBitmapContextCreateImage(context); 
    // Unlock the pixel buffer 
    CVPixelBufferUnlockBaseAddress(imageBuffer,0); 

    // Free up the context and color space 
    CGContextRelease(context); 
    CGColorSpaceRelease(colorSpace); 

    // Create an image object from the Quartz image 
    UIImage *image = [UIImage imageWithCGImage:quartzImage]; 

    // Release the Quartz image 
    CGImageRelease(quartzImage); 

    return (image); 
} 

@end 
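
As for the incompatible pointer types warning that is still left on setSampleBufferDelegate:queue:, it most likely just means the view controller never declares that it conforms to AVCaptureVideoDataOutputSampleBufferDelegate. Declaring the conformance in the class extension (which could also hold a session property instead of the file-scope global) should silence it; an untested sketch:

    @interface CJKViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

    // A strong session property could replace the file-scope AVCaptureSession above.
    @property (nonatomic, strong) AVCaptureSession *session;

    @end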