
Answer


You need to define a UIImage property (plus a property to hold the capture session) and make your class adopt the AVCaptureVideoDataOutputSampleBufferDelegate protocol:

#import <AVFoundation/AVFoundation.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) UIImage *theImage;

@end
In viewDidLoad, or somewhere else appropriate, add this:

[self captureImage]; 

Then implement the following methods:

- (void)captureImage
{
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetHigh;

    AVCaptureDevice *device = [self frontCamera];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"no input: %@", error);
        return;
    }
    [self.session addInput:input];

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [self.session addOutput:output];
    output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    // Deliver sample buffers on a private serial queue, not the main thread.
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];

    // Leave the session running; it is stopped in the delegate callback once a
    // frame has arrived. Calling stopRunning immediately after startRunning
    // would tear the session down before any frame could be delivered.
    [self.session startRunning];
}
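Note that startRunning is a blocking call that can take some time to complete, so Apple recommends keeping it off the main thread. A minimal sketch of that variation, assuming the session property above:

// Start the session on a background queue so the UI stays responsive;
// -startRunning blocks until the session has actually started.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [self.session startRunning];
});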

- (AVCaptureDevice *)frontCamera {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    return nil;
}
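If you target iOS 10 or later, devicesWithMediaType: is deprecated; a sketch of the same lookup using its AVCaptureDeviceDiscoverySession replacement:

- (AVCaptureDevice *)frontCamera {
    // iOS 10+ replacement for the deprecated devicesWithMediaType: lookup.
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionFront];
    return discovery.devices.firstObject;
}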

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {

    CGImageRef cgImage = [self imageFromSampleBuffer:sampleBuffer];
    self.theImage = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    // One frame is enough; stop the session so this callback does not
    // keep overwriting the file on every subsequent frame.
    [self.session stopRunning];

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsPath = [paths objectAtIndex:0];
    NSString *filePath = [documentsPath stringByAppendingPathComponent:@"image.png"];
    NSData *data = UIImagePNGRepresentation(self.theImage);
    [data writeToFile:filePath atomically:YES];
}
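The first frames a camera delivers are often dark while auto-exposure and auto-white-balance settle, which can make the very first saved frame come out nearly black. A hedged variation that waits out a few warm-up frames (the framesSeen counter is an illustrative addition, not part of the original answer):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    static int framesSeen = 0; // illustrative warm-up counter
    if (++framesSeen < 10) {
        return; // discard early frames while auto-exposure settles
    }
    CGImageRef cgImage = [self imageFromSampleBuffer:sampleBuffer];
    self.theImage = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    [self.session stopRunning];
    // ... write the PNG as above ...
}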

- (CGImageRef)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // The buffer is non-planar 32BGRA, so use the plain base address;
    // CVPixelBufferGetBaseAddressOfPlane is meant for planar buffers.
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // BGRA corresponds to 32-bit little-endian with premultiplied alpha first.
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);

    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // The caller is responsible for releasing the returned CGImage.
    return newImage;
}
This doesn't capture at the camera's full photo resolution. You should use AVCaptureStillImageOutput instead. – whoKnows
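For reference, a minimal sketch of the AVCaptureStillImageOutput approach that comment suggests, reusing the session property above (this was the standard still-photo API at the time; it was later deprecated in favor of AVCapturePhotoOutput on iOS 10):

AVCaptureStillImageOutput *stillOutput = [[AVCaptureStillImageOutput alloc] init];
stillOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };
[self.session addOutput:stillOutput];

AVCaptureConnection *connection = [stillOutput connectionWithMediaType:AVMediaTypeVideo];
[stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                         completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (!imageDataSampleBuffer) {
        NSLog(@"still capture failed: %@", error);
        return;
    }
    // JPEG data at the sensor's full still-image resolution.
    NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    self.theImage = [UIImage imageWithData:jpegData];
}];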

The image comes out completely black, even though the camera is working. – SM18
