swift: How to take screenshot of AVPlayerLayer()

How do I take a screenshot of an AVPlayerLayer? I tried the following code and it works well: it captures the entire view as expected.

func screenShotMethod() {
    let window = UIApplication.shared.delegate!.window!!
    //capture the entire window into an image
    UIGraphicsBeginImageContextWithOptions(window.bounds.size, false, UIScreen.main.scale)
    window.drawHierarchy(in: window.bounds, afterScreenUpdates: false)
    let windowImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    //now position the image x/y away from the top-left corner to get the portion we want
    UIGraphicsBeginImageContext(view.frame.size)
    windowImage?.draw(at: CGPoint(x: -view.frame.origin.x, y: -view.frame.origin.y))
    let croppedImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    //embed image in an imageView, supports transforms.
    let resultImageView = UIImageView(image: croppedImage)
    UIImageWriteToSavedPhotosAlbum(croppedImage, nil, nil, nil)
}

But the problem is that when I run the same code on an iPhone (device), it returns a black image. I don't know what is wrong.

Any suggestions would be greatly helpful!

    Answer:

    A few days ago, we ran into the same issue: when taking a screenshot of a screen that contains a video player, the screenshot looks fine in the simulator, but on a device it comes out black.

    After a lot of failed attempts, I ended up with a workaround (I'm not sure it is the "correct" way to solve the problem), but it did the trick and I was able to get the screenshot on both the device and the simulator.

    This is the approach I used to solve the issue:

    1 -> Grab a single frame from the video at the current time (a public API, AVAssetImageGenerator, is already available for this).

    2 -> Display that thumbnail in place of the AVPlayerLayer (add a temporary image view to the hierarchy) and take the screenshot.

    3 -> Once done, remove the temporary thumbnail again (take it out of the hierarchy).
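
    In outline, the idea looks roughly like this (a minimal sketch only, not the full implementation; `playerItem`, `playerView`, and the surrounding view controller's `view` stand in for your own properties, UIKit and AVFoundation are assumed to be imported, and the complete Objective-C and Swift listings follow below):

    func quickSnapshot() -> UIImage? {
        // 1. Grab the frame currently being displayed by the player.
        let generator = AVAssetImageGenerator(asset: playerItem.asset)
        generator.requestedTimeToleranceBefore = kCMTimeZero
        generator.requestedTimeToleranceAfter = kCMTimeZero
        guard let frame = try? generator.copyCGImage(at: playerItem.currentTime(), actualTime: nil) else {
            return nil
        }

        // 2. Cover the AVPlayerLayer with a temporary image view showing that frame.
        let thumbnail = UIImageView(frame: playerView.bounds)
        thumbnail.contentMode = .scaleAspectFill
        thumbnail.clipsToBounds = true
        thumbnail.image = UIImage(cgImage: frame)
        playerView.addSubview(thumbnail)

        // 3. Snapshot the view hierarchy, then remove the temporary image view again.
        UIGraphicsBeginImageContextWithOptions(view.bounds.size, false, UIScreen.main.scale)
        view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
        let snapshot = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        thumbnail.removeFromSuperview()
        return snapshot
    }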

    Following is a demo of the same approach (the original solution is in Objective-C even though the question is asked in Swift; a Swift translation follows below).

    Objective-C solution
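
    Note that the code below calls a RadiansToDegrees helper that the original post does not show; a common definition (assumed here) would be:

        #define RadiansToDegrees(radians) ((radians) * 180.0 / M_PI)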

        - (void)SnapShot {
            UIImage *capturedImage = [self getASnapShotWithAVLayer];
        }
        - (UIImage *)getASnapShotWithAVLayer {
            //Add temporary thumbnail One
            UIImageView *temporaryViewForVideoOne = [[UIImageView alloc] initWithFrame:self.videoViewOne.bounds];
            temporaryViewForVideoOne.contentMode = UIViewContentModeScaleAspectFill;
            UIImage *imageFromCurrentTimeForVideoOne = [self takeVideoSnapShot:_playerItem1];
            int orientationFromVideoForVideoOne = [self getTheActualOrientationOfVideo:self.playerItem1];
            if(orientationFromVideoForVideoOne == 0)
            {
                orientationFromVideoForVideoOne = 3;
            }
            else if (orientationFromVideoForVideoOne == 90)
            {
                orientationFromVideoForVideoOne = 0;
            }
            imageFromCurrentTimeForVideoOne =
            [UIImage imageWithCGImage:[imageFromCurrentTimeForVideoOne CGImage]
                                scale:[imageFromCurrentTimeForVideoOne scale]
                          orientation: orientationFromVideoForVideoOne];
            UIImage *rotatedImageFromCurrentContextForVideoOne = [self normalizedImage:imageFromCurrentTimeForVideoOne];
            temporaryViewForVideoOne.clipsToBounds = YES;
            temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne;
            [self.videoViewOne addSubview:temporaryViewForVideoOne];
            CGSize imageSize = CGSizeZero;
            UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
            if (UIInterfaceOrientationIsPortrait(orientation)) {
                imageSize = [UIScreen mainScreen].bounds.size;
            } else {
                imageSize = CGSizeMake([UIScreen mainScreen].bounds.size.height, [UIScreen mainScreen].bounds.size.width);
            }
    
            UIGraphicsBeginImageContextWithOptions(imageSize, NO, [[UIScreen mainScreen] scale]);
            CGContextRef context = UIGraphicsGetCurrentContext();
            for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
                CGContextSaveGState(context);
                CGContextTranslateCTM(context, window.center.x, window.center.y);
                CGContextConcatCTM(context, window.transform);
                CGContextTranslateCTM(context, -window.bounds.size.width * window.layer.anchorPoint.x, -window.bounds.size.height * window.layer.anchorPoint.y);
                if (orientation == UIInterfaceOrientationLandscapeLeft) {
                    CGContextRotateCTM(context, M_PI_2);
                    CGContextTranslateCTM(context, 0, -imageSize.width);
                } else if (orientation == UIInterfaceOrientationLandscapeRight) {
                    CGContextRotateCTM(context, -M_PI_2);
                    CGContextTranslateCTM(context, -imageSize.height, 0);
                } else if (orientation == UIInterfaceOrientationPortraitUpsideDown) {
                    CGContextRotateCTM(context, M_PI);
                    CGContextTranslateCTM(context, -imageSize.width, -imageSize.height);
                }
                //drawViewHierarchyInRect:afterScreenUpdates: is available on iOS 7+, so no respondsToSelector: check is needed
                [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:YES];
                CGContextRestoreGState(context);
            }
            UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            [temporaryViewForVideoOne removeFromSuperview];
            //[temporaryViewForVideoTwo removeFromSuperview]; //second player view in the original project, not shown in this excerpt
            return image;
        }
        -(UIImage *)takeVideoSnapShot: (AVPlayerItem *) playerItem{
            AVURLAsset *asset = (AVURLAsset *) playerItem.asset;
            AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
            imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
            imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
            CGImageRef thumb = [imageGenerator copyCGImageAtTime:playerItem.currentTime
                                                      actualTime:NULL
                                                           error:NULL];
            UIImage *videoImage = [UIImage imageWithCGImage:thumb];
            CGImageRelease(thumb);
            return videoImage;
        }
        -(int)getTheActualOrientationOfVideo:(AVPlayerItem *)playerItem
        {
            AVAsset *asset = playerItem.asset;
            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
            AVAssetTrack *track = [tracks objectAtIndex:0];
            CGAffineTransform videoAssetOrientation_ = [track preferredTransform];
            CGFloat videoAngle  = RadiansToDegrees(atan2(videoAssetOrientation_.b, videoAssetOrientation_.a));
            int  orientation = 0;
            switch ((int)videoAngle) {
                case 0:
                    orientation = UIImageOrientationRight;
                    break;
                case 90:
                    orientation = UIImageOrientationUp;
                    break;
                case 180:
                    orientation = UIImageOrientationLeft;
                    break;
                case -90:
                    orientation = UIImageOrientationDown;
                    break;
                default:
                    //Not found
                    break;
            }
            return orientation;
        }
        - (UIImage *)normalizedImage:(UIImage *)imageOf {
            if (imageOf.imageOrientation == UIImageOrientationUp) return imageOf;
    
            UIGraphicsBeginImageContextWithOptions(imageOf.size, NO, imageOf.scale);
            [imageOf drawInRect:(CGRect){0, 0, imageOf.size}];
            UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            return normalizedImage;
        }
    

    Swift solution
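
    As in the Objective-C version, the code calls RadiansToDegrees, which is not part of Foundation; a simple Swift equivalent (again an assumed definition) is:

    func RadiansToDegrees(_ radians: CGFloat) -> CGFloat {
        return radians * 180.0 / CGFloat.pi
    }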

    func snapShot() {
        let capturedImage: UIImage? = getASnapShotWithAVLayer()
    }
    
    func getASnapShotWithAVLayer() -> UIImage {
        //Add temporary thumbnail One
        let temporaryViewForVideoOne = UIImageView(frame: videoViewOne.bounds) //replace videoViewOne with your view that is showing the AVPlayer content
        temporaryViewForVideoOne.contentMode = .scaleAspectFill
        var imageFromCurrentTimeForVideoOne: UIImage? = takeVideoSnapShot(playerItem1)
        var orientationFromVideoForVideoOne: Int = getTheActualOrientationOfVideo(playerItem1)
        if orientationFromVideoForVideoOne == 0 {
            orientationFromVideoForVideoOne = 3
        }
        else if orientationFromVideoForVideoOne == 90 {
            orientationFromVideoForVideoOne = 0
        }
    
        if let currentFrame = imageFromCurrentTimeForVideoOne,
            let cgImage = currentFrame.cgImage,
            let fixedOrientation = UIImageOrientation(rawValue: orientationFromVideoForVideoOne) {
            imageFromCurrentTimeForVideoOne = UIImage(cgImage: cgImage, scale: currentFrame.scale, orientation: fixedOrientation)
        }
        let rotatedImageFromCurrentContextForVideoOne: UIImage? = imageFromCurrentTimeForVideoOne.map { normalizedImage($0) }
        temporaryViewForVideoOne.clipsToBounds = true
        temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne
        videoViewOne.addSubview(temporaryViewForVideoOne) //Replace videoViewOne with your view containing AVPlayer
        var imageSize = CGSize.zero
        let orientation: UIInterfaceOrientation = UIApplication.shared.statusBarOrientation
        if UIInterfaceOrientationIsPortrait(orientation) {
            imageSize = UIScreen.main.bounds.size
        }
        else {
            imageSize = CGSize(width: CGFloat(UIScreen.main.bounds.size.height), height: CGFloat(UIScreen.main.bounds.size.width))
        }
        UIGraphicsBeginImageContextWithOptions(imageSize, false, UIScreen.main.scale)
        let context: CGContext = UIGraphicsGetCurrentContext()!
        for window: UIWindow in UIApplication.shared.windows {
            context.saveGState()
            context.translateBy(x: window.center.x, y: window.center.y)
            context.concatenate(window.transform)
            context.translateBy(x: -window.bounds.size.width * window.layer.anchorPoint.x, y: -window.bounds.size.height * window.layer.anchorPoint.y)
            if orientation == .landscapeLeft {
            context.rotate(by: CGFloat.pi / 2)
                context.translateBy(x: 0, y: -imageSize.width)
            }
            else if orientation == .landscapeRight {
            context.rotate(by: -CGFloat.pi / 2)
                context.translateBy(x: -imageSize.height, y: 0)
            }
            else if orientation == .portraitUpsideDown {
                context.rotate(by: .pi)
                context.translateBy(x: -imageSize.width, y: -imageSize.height)
            }
    
            //drawHierarchy(in:afterScreenUpdates:) is available on iOS 7+, so the responds(to:) check is unnecessary
            window.drawHierarchy(in: window.bounds, afterScreenUpdates: true)
            context.restoreGState()
        }
        let image: UIImage? = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        temporaryViewForVideoOne.removeFromSuperview()
        return image!
    }
    
    func takeVideoSnapShot(_ playerItem: AVPlayerItem) -> UIImage? {
        let asset = playerItem.asset
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.requestedTimeToleranceAfter = kCMTimeZero
        imageGenerator.requestedTimeToleranceBefore = kCMTimeZero
        //copyCGImage(at:actualTime:) throws, so return nil if the frame cannot be generated
        guard let thumb = try? imageGenerator.copyCGImage(at: playerItem.currentTime(), actualTime: nil) else {
            return nil
        }
        return UIImage(cgImage: thumb) //CGImage is memory-managed in Swift; no CGImageRelease needed
    }
    
    func getTheActualOrientationOfVideo(_ playerItem: AVPlayerItem) -> Int {
        let asset = playerItem.asset
        let tracks = asset.tracks(withMediaType: AVMediaTypeVideo)
        guard let track = tracks.first else { return 0 }
        let videoAssetOrientation_ = track.preferredTransform
        let videoAngle = RadiansToDegrees(atan2(videoAssetOrientation_.b, videoAssetOrientation_.a))
        var orientation: UIImageOrientation = .up
        switch Int(videoAngle) {
        case 0:
            orientation = .right
        case 90:
            orientation = .up
        case 180:
            orientation = .left
        case -90:
            orientation = .down
        default:
            //Not found; keep the default
            break
        }
        return orientation.rawValue
    }
    
    func normalizedImage(_ imageOf: UIImage) -> UIImage {
        if imageOf.imageOrientation == .up {
            return imageOf
        }
        UIGraphicsBeginImageContextWithOptions(imageOf.size, false, imageOf.scale)
        imageOf.draw(in: CGRect(origin: .zero, size: imageOf.size))
        let normalizedImage: UIImage? = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return normalizedImage!
    }
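
    Note that getASnapShotWithAVLayer() captures the whole window, just like the code in the question. If only the player's portion is wanted, the result can be cropped back to the player view the same way the question does; a rough sketch (cropToPlayerView is a hypothetical helper, and videoViewOne is the same player container view used above):

    func cropToPlayerView(_ windowImage: UIImage) -> UIImage? {
        //Frame of the player view in window coordinates (assumes videoViewOne is currently on screen)
        let cropRect = videoViewOne.convert(videoViewOne.bounds, to: nil)
        UIGraphicsBeginImageContextWithOptions(cropRect.size, false, windowImage.scale)
        //Draw the full-window snapshot shifted so that only the player area lands inside the context
        windowImage.draw(at: CGPoint(x: -cropRect.origin.x, y: -cropRect.origin.y))
        let cropped = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return cropped
    }

    Something like cropToPlayerView(getASnapShotWithAVLayer()) should then give roughly the cropped image the question was originally after.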