
Applying a filter to a live camera preview - Swift

I followed the answer at https://stackoverflow.com/a/32381052/8422218 to build an app that uses the back camera, applies a filter, and displays the result on screen in real time.

// 
// ViewController.swift 
// CameraFilter 
// 

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    var captureSession = AVCaptureSession() 
    var backCamera: AVCaptureDevice? 
    var frontCamera: AVCaptureDevice? 
    var currentCamera: AVCaptureDevice? 

    var photoOutput: AVCapturePhotoOutput? 

    var cameraPreviewLayer: AVCaptureVideoPreviewLayer? 

    @IBOutlet weak var filteredImage: UIImageView! 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     setupCaptureSession() 
     setupDevice() 
     setupInputOutput() 
     setupCorrectFramerate(currentCamera: currentCamera!) // will default to 30fps unless stated otherwise 
     setupPreviewLayer() 
     startRunningCaptureSession() 
    } 

    func setupCaptureSession() { 
     // should support anything up to 1920x1080 res, incl. 240fps @ 720p 
     captureSession.sessionPreset = AVCaptureSession.Preset.high 
    } 

    func setupDevice() { 
     let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified) 
     let devices = deviceDiscoverySession.devices 

     for device in devices { 
      if device.position == AVCaptureDevice.Position.back { 
       backCamera = device 
      } 
      else if device.position == AVCaptureDevice.Position.front { 
       frontCamera = device 
      } 
     } 

     currentCamera = backCamera 
    } 

    func setupInputOutput() { 
     do { 
      let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!) 
      captureSession.addInput(captureDeviceInput) 
      photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil) 
     } catch { 
      print(error) 
     } 
    } 

    func setupCorrectFramerate(currentCamera: AVCaptureDevice) { 
     for vFormat in currentCamera.formats { 
      //see available types 
      //print("\(vFormat) \n") 

      var ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange] 
      let frameRates = ranges[0] 

      do { 
       //set to 240fps - available types are: 30, 60, 120 and 240 and custom 
       // lower framerates cause major stuttering 
       if frameRates.maxFrameRate == 240 { 
        try currentCamera.lockForConfiguration() 
        currentCamera.activeFormat = vFormat as AVCaptureDevice.Format 
        //for custom framerate set min max activeVideoFrameDuration to whatever you like, e.g. 1 and 180 
        currentCamera.activeVideoMinFrameDuration = frameRates.minFrameDuration 
        currentCamera.activeVideoMaxFrameDuration = frameRates.maxFrameDuration 
       } 
      } 
      catch { 
       print("Could not set active format") 
       print(error) 
      } 
     } 
    } 

    func setupPreviewLayer() { 
     cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
     cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill 
     cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait 
     cameraPreviewLayer?.frame = self.view.frame 

     //set preview in background, allows for elements to be placed in the foreground 
     self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0) 
    } 

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
     let videoOutput = AVCaptureVideoDataOutput() 
     videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main) 

     let comicEffect = CIFilter(name: "CIComicEffect") 

     let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) 
     let cameraImage = CIImage(cvImageBuffer: pixelBuffer!) 

     comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey) 

     //let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage!) 
     let filteredImage = UIImage(ciImage: comicEffect!.value(forKey: kCIOutputImageKey) as! CIImage!) 

     print("made it here") 


     DispatchQueue.main.async { 
      self.filteredImage.image = filteredImage 
     } 
    } 

    func startRunningCaptureSession() { 
     captureSession.startRunning() 
     backCamera?.unlockForConfiguration() 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
     // Dispose of any resources that can be recreated. 
    } 


} 

My storyboard contains a UIImageView that fills the entire screen.

Here is my code. When I run the application I can only see the camera preview, but not the applied filter. Where am I going wrong?

I also found the following repo, which contains all the relevant code needed to build such an application: https://github.com/altitudelabs/iOSRealTimeFilterTutorial

It's written in Objective-C and quite old, but I've had a go at converting it to Swift code, without success:

// 
// ViewController.swift 
// CameraFilter 
// 

import UIKit 
import AVFoundation 
import GLKit 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    var videoPreviewView: GLKView? 
    var ciContext: CIContext? 
    var eaglContext: EAGLContext? 
    var videoPreviewViewBounds = CGRect.zero 
    var videoDevice: AVCaptureDevice? 

    var captureSession = AVCaptureSession() 

    var backCamera: AVCaptureDevice? 
    var frontCamera: AVCaptureDevice? 
    var currentCamera: AVCaptureDevice? 
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer? 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     self.view.backgroundColor = UIColor.clear 

     let window: UIView? = (UIApplication.shared.delegate as? AppDelegate)?.window 
     eaglContext = EAGLContext(api: .openGLES2) 
     videoPreviewView = GLKView(frame: (window?.bounds)!, context: eaglContext!) 
     videoPreviewView?.enableSetNeedsDisplay = false 

     videoPreviewView?.transform = CGAffineTransform(rotationAngle: CGFloat.pi * 2) 
     videoPreviewView?.frame = (window?.bounds)! 

     videoPreviewView?.bindDrawable() 

     videoPreviewViewBounds = CGRect.zero 

     videoPreviewViewBounds.size.width = CGFloat(videoPreviewView!.drawableWidth) 
     videoPreviewViewBounds.size.height = CGFloat(videoPreviewView!.drawableHeight) 

     ciContext = CIContext(eaglContext: eaglContext!, options: [kCIContextWorkingColorSpace: NSNull()]) 

     setupDevice() 

     setupCaptureSession() 
     setupInputOutput() 
     setupCorrectFramerate(currentCamera: currentCamera!) 
     setupPreviewLayer() 



    } 

    func setupCaptureSession() { 
     // should support anything up to 1920x1080 res, incl. 240fps @ 720p 
     captureSession.sessionPreset = AVCaptureSession.Preset.high 
    } 

    func setupPreviewLayer() { 
     cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
     cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill 
     cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait 
     cameraPreviewLayer?.frame = self.view.frame 

     //set preview in background, allows for elements to be placed in the foreground 
     self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0) 
    } 

    func setupInputOutput() { 
     do { 
      let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!) 
      captureSession.addInput(captureDeviceInput) 

      let videoDataOutput = AVCaptureVideoDataOutput() 
      videoDataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String): kCVPixelFormatType_32BGRA] 

      let captureSessionQueue = DispatchQueue(label: "capture_session_queue") 
      videoDataOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) 

      videoDataOutput.alwaysDiscardsLateVideoFrames = true 

      captureSession.addOutput(videoDataOutput) 
      captureSession.beginConfiguration() 
      captureSession.commitConfiguration() 
      captureSession.startRunning() 
        print("here") 

     } catch { 
      print(error) 
     } 
    } 

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 

     let imageBuffer: CVImageBuffer? = CMSampleBufferGetImageBuffer(sampleBuffer) 
     let sourceImage = CIImage(cvPixelBuffer: imageBuffer!, options: nil) 
     let sourceExtent: CGRect = sourceImage.extent 

     let comicEffect = CIFilter(name: "CIComicEffect") 

     let filteredImage: CIImage? = comicEffect?.outputImage 

     let sourceAspect: CGFloat = sourceExtent.size.width/sourceExtent.size.height 
     let previewAspect: CGFloat = videoPreviewViewBounds.size.width/videoPreviewViewBounds.size.height 
     // we want to maintain the aspect radio of the screen size, so we clip the video image 
     var drawRect: CGRect = sourceExtent 
     if sourceAspect > previewAspect { 
      // use full height of the video image, and center crop the width 
      drawRect.origin.x += (drawRect.size.width - drawRect.size.height * previewAspect)/2.0 
      drawRect.size.width = drawRect.size.height * previewAspect 
     } 
     else { 
      // use full width of the video image, and center crop the height 
      drawRect.origin.y += (drawRect.size.height - drawRect.size.width/previewAspect)/2.0 
      drawRect.size.height = drawRect.size.width/previewAspect 
     } 

     videoPreviewView?.bindDrawable() 

     if eaglContext != EAGLContext.current() { 
      EAGLContext.setCurrent(eaglContext) 
     } 

     glClearColor(0.5, 0.5, 0.5, 1.0) 
     glClear(GLbitfield(GL_COLOR_BUFFER_BIT)) 
     // set the blend mode to "source over" so that CI will use that 
     glEnable(GLenum(GL_BLEND)) 
     glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE_MINUS_SRC_ALPHA)) 
     if (filteredImage != nil) { 
      ciContext?.draw(filteredImage!, in: videoPreviewViewBounds, from: drawRect) 
     } 

     videoPreviewView?.display() 
    } 

    func setupDevice() { 
     let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified) 
     let devices = deviceDiscoverySession.devices 

     for device in devices { 
      if device.position == AVCaptureDevice.Position.back { 
       backCamera = device 
      } 
      else if device.position == AVCaptureDevice.Position.front { 
       frontCamera = device 
      } 
     } 

     currentCamera = backCamera 
    } 

    func setupCorrectFramerate(currentCamera: AVCaptureDevice) { 
     for vFormat in currentCamera.formats { 
      //see available types 
      //print("\(vFormat) \n") 

      var ranges = vFormat.videoSupportedFrameRateRanges as [AVFrameRateRange] 
      let frameRates = ranges[0] 

      do { 
       //set to 240fps - available types are: 30, 60, 120 and 240 and custom 
       // lower framerates cause major stuttering 
       if frameRates.maxFrameRate == 240 { 
        try currentCamera.lockForConfiguration() 
        currentCamera.activeFormat = vFormat as AVCaptureDevice.Format 
        //for custom framerate set min max activeVideoFrameDuration to whatever you like, e.g. 1 and 180 
        currentCamera.activeVideoMinFrameDuration = frameRates.minFrameDuration 
        currentCamera.activeVideoMaxFrameDuration = frameRates.maxFrameDuration 
       } 
      } 
      catch { 
       print("Could not set active format") 
       print(error) 
      } 
     } 
    } 


} 

I just get a blank screen.

1 Answer

A few things:

You are using an AVCaptureVideoPreviewLayer, but you don't need it and your code is wrong to use it: it transports the captured camera pixels directly to the screen, skipping your image processing and CIFilter entirely.
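For reference, a minimal sketch (my wording, not code from the answer) of what setupInputOutput() looks like once the preview layer is dropped and an AVCaptureVideoDataOutput delivers frames to the delegate instead; the queue label is arbitrary, and the canAdd checks are added for safety:

    func setupInputOutput() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }

            // Raw frames go to captureOutput(_:didOutput:from:);
            // no AVCaptureVideoPreviewLayer is involved.
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            videoOutput.alwaysDiscardsLateVideoFrames = true
            videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample.buffer.queue"))
            if captureSession.canAddOutput(videoOutput) {
                captureSession.addOutput(videoOutput)
            }
        } catch {
            print(error)
        }
    }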

Your conformance to AVCaptureVideoDataOutputSampleBufferDelegate is broken: func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) was renamed to func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) in Swift 4, so with the old signature your delegate method is never called.
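With the corrected signature, a minimal sketch of the callback, reusing the comic-effect filtering and the filteredImage outlet from your first snippet (creating the CIFilter once as a property would be cheaper, but it is created inline here to stay close to your code):

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let comicEffect = CIFilter(name: "CIComicEffect") else { return }

        // Wrap the camera frame and run it through the filter.
        comicEffect.setValue(CIImage(cvImageBuffer: pixelBuffer), forKey: kCIInputImageKey)
        guard let outputImage = comicEffect.outputImage else { return }
        let filtered = UIImage(ciImage: outputImage)

        // The delegate fires on a background queue; UIKit work belongs on main.
        DispatchQueue.main.async {
            self.filteredImage.image = filtered
        }
    }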

And since you are no longer using an AVCaptureVideoPreviewLayer, you need to request permission before you can get pixels from the camera. This is typically done in viewDidAppear(_:), like:

override func viewDidAppear(_ animated: Bool) { 
    super.viewDidAppear(animated) 
    if AVCaptureDevice.authorizationStatus(for: AVMediaType.video) != .authorized 
    { 
     AVCaptureDevice.requestAccess(for: AVMediaType.video, completionHandler: 
     { (authorized) in 
      DispatchQueue.main.async 
      { 
       if authorized 
       { 
        self.setupInputOutput() 
       } 
      } 
     }) 
    } 
} 
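Note that setupInputOutput() only runs here once access has been granted; you also need an NSCameraUsageDescription entry in your Info.plist, or iOS 10+ will terminate the app as soon as it requests camera access.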

Also, if you support rotation you will need to update the AVCaptureConnection for the new orientation in your didOutput callback.
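A sketch of one way to do that (updateOrientation(on:) is a hypothetical helper, not part of the answer's code; the delegate fires on a background queue, while UIDevice orientation is ideally read on the main thread):

    // Call from captureOutput(_:didOutput:from:) or on rotation to keep
    // the connection's orientation in sync with the device.
    func updateOrientation(on connection: AVCaptureConnection) {
        guard connection.isVideoOrientationSupported else { return }
        switch UIDevice.current.orientation {
        case .landscapeLeft:      connection.videoOrientation = .landscapeRight
        case .landscapeRight:     connection.videoOrientation = .landscapeLeft
        case .portraitUpsideDown: connection.videoOrientation = .portraitUpsideDown
        default:                  connection.videoOrientation = .portrait
        }
    }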

After these changes, the code works, producing an image like this (full source code):

[Screenshot]
