
swift : How to take a screenshot of an AVPlayerLayer()

How do I take a screenshot of an AVPlayerLayer? I tried it with the following code, and it works well:

func screenShotMethod() { 
    let window = UIApplication.shared.delegate!.window!! 
    //capture the entire window into an image 
    UIGraphicsBeginImageContextWithOptions(window.bounds.size, false, UIScreen.main.scale) 
    window.drawHierarchy(in: window.bounds, afterScreenUpdates: false) 
    let windowImage = UIGraphicsGetImageFromCurrentImageContext() 
    UIGraphicsEndImageContext() 
    //now position the image x/y away from the top-left corner to get the portion we want 
    UIGraphicsBeginImageContext(view.frame.size) 
    windowImage?.draw(at: CGPoint(x: -view.frame.origin.x, y: -view.frame.origin.y)) 
    let croppedImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()! 
    UIGraphicsEndImageContext() 
    //embed image in an imageView, supports transforms. 
    let resultImageView = UIImageView(image: croppedImage) 
    UIImageWriteToSavedPhotosAlbum(croppedImage, nil, nil, nil) 
} 

It captures the entire view, but when I try the same code running on an iPhone (device), the problem is that it returns a black image instead. I am trying to figure out what went wrong.

Any suggestions would be a great help!


Possible duplicate of [How do I take a screenshot of a UIView?](http://stackoverflow.com/questions/2214957/how-do-take-a-screen-a-uiview) –


Take a look at this post: http://stackoverflow.com/questions/23286252/screenshot-for-avplayer-and-video –


Change 'afterScreenUpdates' to 'true' and try it. –
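
Applied to the screenShotMethod above, that comment suggests a one-line change (shown here only as a sketch of the suggestion, not a confirmed fix):

    window.drawHierarchy(in: window.bounds, afterScreenUpdates: true) 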

Answers


I ran into the same problem a few days ago. If we take a screenshot of a screen that contains a video player, the screenshot looks fine in the simulator, but on the device it comes out as a black screen.

After many failed attempts I finally ended up with a patch (I am not sure it is the correct way to solve the problem), but it did the trick and I was able to get the screenshot on the device as well as the simulator.

Here is the approach I used to work around this problem (a short sketch of the steps and the full demo sample follow):

1 -> Take a single frame from the video at its current time (public methods for this are already available)

2 -> Use that thumbnail in place of the AVPlayer's CALayer (add it to the view hierarchy)

3 -> Once the work is done, remove the thumbnail from memory (remove it from the hierarchy)
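
For reference, here is a minimal sketch of those three steps in Swift. The names videoView and playerItem are placeholders for your own player setup (they are not part of the original answer), and the orientation handling from the full sample below is omitted:

    import AVFoundation 
    import UIKit 

    func sketchSnapshot(of videoView: UIView, playerItem: AVPlayerItem) -> UIImage? { 
        // 1. Grab a single frame at the current playback time. 
        let generator = AVAssetImageGenerator(asset: playerItem.asset) 
        generator.requestedTimeToleranceBefore = kCMTimeZero 
        generator.requestedTimeToleranceAfter = kCMTimeZero 
        guard let cgImage = try? generator.copyCGImage(at: playerItem.currentTime(), actualTime: nil) else { 
            return nil 
        } 

        // 2. Cover the AVPlayerLayer with the still frame so the snapshot has content to draw. 
        let thumbnail = UIImageView(frame: videoView.bounds) 
        thumbnail.contentMode = .scaleAspectFill 
        thumbnail.clipsToBounds = true 
        thumbnail.image = UIImage(cgImage: cgImage) 
        videoView.addSubview(thumbnail) 

        // Snapshot the view hierarchy as usual. 
        UIGraphicsBeginImageContextWithOptions(videoView.bounds.size, false, UIScreen.main.scale) 
        videoView.drawHierarchy(in: videoView.bounds, afterScreenUpdates: true) 
        let snapshot = UIGraphicsGetImageFromCurrentImageContext() 
        UIGraphicsEndImageContext() 

        // 3. Remove the temporary thumbnail once the snapshot is taken. 
        thumbnail.removeFromSuperview() 
        return snapshot 
    } 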

Here is the full demo sample:

Objective-C solution

- (void)SnapShot { 
     UIImage *capturedImage = [self getASnapShotWithAVLayer]; 
    } 
    - (UIImage *)getASnapShotWithAVLayer { 
     //Add temporary thumbnail One 
     UIImageView *temporaryViewForVideoOne = [[UIImageView alloc] initWithFrame:self.videoViewOne.bounds]; 
     temporaryViewForVideoOne.contentMode = UIViewContentModeScaleAspectFill; 
     UIImage *imageFromCurrentTimeForVideoOne = [self takeVideoSnapShot:_playerItem1]; 
     int orientationFromVideoForVideoOne = [self getTheActualOrientationOfVideo:self.playerItem1]; 
     if(orientationFromVideoForVideoOne == 0) 
     { 
      orientationFromVideoForVideoOne = 3; 
     } 
     else if (orientationFromVideoForVideoOne == 90) 
     { 
      orientationFromVideoForVideoOne = 0; 
     } 
     imageFromCurrentTimeForVideoOne = 
     [UIImage imageWithCGImage:[imageFromCurrentTimeForVideoOne CGImage] 
          scale:[imageFromCurrentTimeForVideoOne scale] 
         orientation: orientationFromVideoForVideoOne]; 
     UIImage *rotatedImageFromCurrentContextForVideoOne = [self normalizedImage:imageFromCurrentTimeForVideoOne]; 
     temporaryViewForVideoOne.clipsToBounds = YES; 
     temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne; 
     [self.videoViewOne addSubview:temporaryViewForVideoOne]; 
     CGSize imageSize = CGSizeZero; 
     UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation]; 
     if (UIInterfaceOrientationIsPortrait(orientation)) { 
      imageSize = [UIScreen mainScreen].bounds.size; 
     } else { 
      imageSize = CGSizeMake([UIScreen mainScreen].bounds.size.height, [UIScreen mainScreen].bounds.size.width); 
     } 

     UIGraphicsBeginImageContextWithOptions(imageSize, NO, [[UIScreen mainScreen] scale]); 
     CGContextRef context = UIGraphicsGetCurrentContext(); 
     for (UIWindow *window in [[UIApplication sharedApplication] windows]) { 
      CGContextSaveGState(context); 
      CGContextTranslateCTM(context, window.center.x, window.center.y); 
      CGContextConcatCTM(context, window.transform); 
      CGContextTranslateCTM(context, -window.bounds.size.width * window.layer.anchorPoint.x, -window.bounds.size.height * window.layer.anchorPoint.y); 
      if (orientation == UIInterfaceOrientationLandscapeLeft) { 
       CGContextRotateCTM(context, M_PI_2); 
       CGContextTranslateCTM(context, 0, -imageSize.width); 
      } else if (orientation == UIInterfaceOrientationLandscapeRight) { 
       CGContextRotateCTM(context, -M_PI_2); 
       CGContextTranslateCTM(context, -imageSize.height, 0); 
      } else if (orientation == UIInterfaceOrientationPortraitUpsideDown) { 
       CGContextRotateCTM(context, M_PI); 
       CGContextTranslateCTM(context, -imageSize.width, -imageSize.height); 
      } 
      if ([window respondsToSelector:@selector(drawViewHierarchyInRect:afterScreenUpdates:)]) { 
       [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:YES]; 
      } else { 
       //Fallback for windows that cannot draw their hierarchy directly 
       [window.layer renderInContext:context]; 
      } 
      CGContextRestoreGState(context); 
     } 
     UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); 
     UIGraphicsEndImageContext(); 
     [temporaryViewForVideoOne removeFromSuperview]; 
     return image; 
    } 
    -(UIImage *)takeVideoSnapShot: (AVPlayerItem *) playerItem{ 
     AVURLAsset *asset = (AVURLAsset *) playerItem.asset; 
     AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset]; 
     imageGenerator.requestedTimeToleranceAfter = kCMTimeZero; 
     imageGenerator.requestedTimeToleranceBefore = kCMTimeZero; 
     CGImageRef thumb = [imageGenerator copyCGImageAtTime:playerItem.currentTime 
                actualTime:NULL 
                 error:NULL]; 
     UIImage *videoImage = [UIImage imageWithCGImage:thumb]; 
     CGImageRelease(thumb); 
     return videoImage; 
    } 
    -(int)getTheActualOrientationOfVideo:(AVPlayerItem *)playerItem 
    { 
     AVAsset *asset = playerItem.asset; 
     NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo]; 
     AVAssetTrack *track = [tracks objectAtIndex:0]; 
     CGAffineTransform videoAssetOrientation_ = [track preferredTransform]; 
     //RadiansToDegrees is assumed to be a helper defined elsewhere, e.g. #define RadiansToDegrees(radians) ((radians) * 180.0/M_PI) 
     CGFloat videoAngle = RadiansToDegrees(atan2(videoAssetOrientation_.b, videoAssetOrientation_.a)); 
     int orientation = 0; 
     switch ((int)videoAngle) { 
      case 0: 
       orientation = UIImageOrientationRight; 
       break; 
      case 90: 
       orientation = UIImageOrientationUp; 
       break; 
      case 180: 
       orientation = UIImageOrientationLeft; 
       break; 
      case -90: 
       orientation = UIImageOrientationDown; 
       break; 
      default: 
       //Not found 
       break; 
     } 
     return orientation; 
    } 
    - (UIImage *)normalizedImage:(UIImage *)imageOf { 
     if (imageOf.imageOrientation == UIImageOrientationUp) return imageOf; 

     UIGraphicsBeginImageContextWithOptions(imageOf.size, NO, imageOf.scale); 
     [imageOf drawInRect:(CGRect){0, 0, imageOf.size}]; 
     UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext(); 
     UIGraphicsEndImageContext(); 
     return normalizedImage; 
    } 

Swift solution

func snapShot() { 
    let capturedImage: UIImage? = getASnapShotWithAVLayer() 
} 

func getASnapShotWithAVLayer() -> UIImage { 
    //Add temporary thumbnail One 
    let temporaryViewForVideoOne = UIImageView(frame: videoViewOne.bounds) //replace videoViewOne with your view that is showing the AVPlayer content 
    temporaryViewForVideoOne.contentMode = .scaleAspectFill 
    var imageFromCurrentTimeForVideoOne: UIImage? = takeVideoSnapShot(playerItem1) 
    var orientationFromVideoForVideoOne: Int = getTheActualOrientationOfVideo(playerItem1) 
    if orientationFromVideoForVideoOne == 0 { 
     orientationFromVideoForVideoOne = 3 
    } 
    else if orientationFromVideoForVideoOne == 90 { 
     orientationFromVideoForVideoOne = 0 
    } 

    if let cgImage = imageFromCurrentTimeForVideoOne?.cgImage, 
     let fixedOrientation = UIImageOrientation(rawValue: orientationFromVideoForVideoOne) { 
     imageFromCurrentTimeForVideoOne = UIImage(cgImage: cgImage, 
               scale: imageFromCurrentTimeForVideoOne?.scale ?? UIScreen.main.scale, 
               orientation: fixedOrientation) 
    } 
    let rotatedImageFromCurrentContextForVideoOne: UIImage? = imageFromCurrentTimeForVideoOne.map { normalizedImage($0) } 
    temporaryViewForVideoOne.clipsToBounds = true 
    temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne 
    videoViewOne.addSubview(temporaryViewForVideoOne) //Replace videoViewOne with your view containing AVPlayer 
    var imageSize = CGSize.zero 
    let orientation: UIInterfaceOrientation = UIApplication.shared.statusBarOrientation 
    if UIInterfaceOrientationIsPortrait(orientation) { 
     imageSize = UIScreen.main.bounds.size 
    } 
    else { 
     imageSize = CGSize(width: CGFloat(UIScreen.main.bounds.size.height), height: CGFloat(UIScreen.main.bounds.size.width)) 
    } 
    UIGraphicsBeginImageContextWithOptions(imageSize, false, UIScreen.main.scale) 
    guard let context = UIGraphicsGetCurrentContext() else { return UIImage() } 
    for window: UIWindow in UIApplication.shared.windows { 
     context.saveGState() 
     context.translateBy(x: window.center.x, y: window.center.y) 
     context.concatenate(window.transform) 
     context.translateBy(x: -window.bounds.size.width * window.layer.anchorPoint.x, y: -window.bounds.size.height * window.layer.anchorPoint.y) 
     if orientation == .landscapeLeft { 
      context.rotate(by: CGFloat.pi / 2) 
      context.translateBy(x: 0, y: -imageSize.width) 
     } 
     else if orientation == .landscapeRight { 
      context.rotate(by: -CGFloat.pi / 2) 
      context.translateBy(x: -imageSize.height, y: 0) 
     } 
     else if orientation == .portraitUpsideDown { 
      context.rotate(by: .pi) 
      context.translateBy(x: -imageSize.width, y: -imageSize.height) 
     } 

     if window.responds(to: #selector(UIView.drawHierarchy(in:afterScreenUpdates:))) { 
      window.drawHierarchy(in: window.bounds, afterScreenUpdates: true) 
     } 
     else { 
      //Fallback for windows that cannot draw their hierarchy directly 
      window.layer.render(in: context) 
     } 
     context.restoreGState() 
    } 
    let image: UIImage? = UIGraphicsGetImageFromCurrentImageContext() 
    UIGraphicsEndImageContext() 
    temporaryViewForVideoOne.removeFromSuperview() 
    return image! 
} 

func takeVideoSnapShot(_ playerItem: AVPlayerItem) -> UIImage { 
    let imageGenerator = AVAssetImageGenerator(asset: playerItem.asset) 
    imageGenerator.requestedTimeToleranceAfter = kCMTimeZero 
    imageGenerator.requestedTimeToleranceBefore = kCMTimeZero 
    //Grab one frame at the item's current time; fall back to an empty image on failure 
    guard let thumb = try? imageGenerator.copyCGImage(at: playerItem.currentTime(), actualTime: nil) else { 
     return UIImage() 
    } 
    return UIImage(cgImage: thumb) 
} 

func getTheActualOrientationOfVideo(_ playerItem: AVPlayerItem) -> Int { 
    let tracks = playerItem.asset.tracks(withMediaType: AVMediaTypeVideo) 
    guard let track = tracks.first else { return 0 } 
    let videoAssetOrientation_ = track.preferredTransform 
    //Convert the track's preferred-transform rotation from radians to degrees 
    let videoAngle = atan2(Double(videoAssetOrientation_.b), Double(videoAssetOrientation_.a)) * 180 / Double.pi 
    var orientation: Int = 0 
    switch Int(videoAngle) { 
     case 0: 
      orientation = UIImageOrientation.right.rawValue 
     case 90: 
      orientation = UIImageOrientation.up.rawValue 
     case 180: 
      orientation = UIImageOrientation.left.rawValue 
     case -90: 
      orientation = UIImageOrientation.down.rawValue 
     default: 
      //Not found 
      break 
    } 
    return orientation 
} 

func normalizedImage(_ imageOf: UIImage) -> UIImage { 
    if imageOf.imageOrientation == .up { 
     return imageOf 
    } 
    UIGraphicsBeginImageContextWithOptions(imageOf.size, false, imageOf.scale) 
    imageOf.draw(in: CGRect(origin: .zero, size: imageOf.size)) 
    let normalizedImage: UIImage? = UIGraphicsGetImageFromCurrentImageContext() 
    UIGraphicsEndImageContext() 
    return normalizedImage! 
} 

Thanks, waiting for the Swift solution. – Raghuram


Hi @Raghuram, I have updated the Swift solution – PrafulD


Thanks for this, will try it – Raghuram
