Home / IOS Development / ios – AVExportSession exports video super slow

ios – AVExportSession exports video super slow



I'm trying to speed up the export of my merged video.

Here is the code. From my extensive research online and on SO, I have pretty much concluded that the PassThrough preset exports much faster — but as I note in a comment in the code, my merge code does not work with that export preset:

            /// Builds a layer instruction that scales and rotates `track` so the source
            /// video fills a portrait render canvas while preserving its recorded orientation.
            ///
            /// - Parameters:
            ///   - track: The composition track the instruction applies to.
            ///   - asset: The source asset; its first video track supplies the transform and natural size.
            ///   - renderWidth: Target canvas width used to compute the fit scale. Defaults to 1080,
            ///     matching the 1080x1920 render size used by the merge code, so existing callers
            ///     are unaffected.
            static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset,
                                                    renderWidth: CGFloat = 1080)
            -> AVMutableVideoCompositionLayerInstruction {
                let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
                // NOTE(review): force-indexes the first video track — crashes if `asset`
                // has none. Callers must guarantee a video track is present.
                let assetTrack = asset.tracks(withMediaType: .video)[0]

                let transform = assetTrack.preferredTransform
                let assetInfo = orientationFromTransform(transform)

                var scaleToFitRatio = renderWidth / assetTrack.naturalSize.width
                if assetInfo.isPortrait {
                    // Portrait clips are stored rotated, so naturalSize.height is the on-screen width.
                    scaleToFitRatio = renderWidth / assetTrack.naturalSize.height
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    var finalTransform = transform.concatenating(scaleFactor)
                    // Needed when mirrored (front-camera) clips render shifted, leaving black bars;
                    // original author notes it is not always required — confirmed empirically.
                    if assetInfo.orientation == .rightMirrored || assetInfo.orientation == .leftMirrored {
                        finalTransform = finalTransform.translatedBy(x: -transform.ty, y: 0)
                    }
                    instruction.setTransform(finalTransform, at: CMTime.zero)
                } else {
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    var concat = transform.concatenating(scaleFactor)
                        .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
                    if assetInfo.orientation == .down {
                        // Upside-down clips: rotate 180 degrees and translate back into frame
                        // before applying the scale.
                        let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                        let windowBounds = UIScreen.main.bounds
                        let yFix = assetTrack.naturalSize.height + windowBounds.height
                        let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                        concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                    }
                    instruction.setTransform(concat, at: CMTime.zero)
                }

                return instruction
        }

        /// Derives the display orientation encoded in a video track's preferred transform.
        ///
        /// - Parameter transform: The track's `preferredTransform`.
        /// - Returns: The matching `UIImage.Orientation` plus whether the clip is portrait.
        ///   Any transform that matches none of the known rotation matrices is reported
        ///   as `.up` landscape, exactly like the identity transform.
        static func orientationFromTransform(_ transform: CGAffineTransform)
            -> (orientation: UIImage.Orientation, isPortrait: Bool) {
                // Match the 2x2 rotation component (a, b, c, d) against the known matrices.
                switch (transform.a, transform.b, transform.c, transform.d) {
                case (0, 1.0, -1.0, 0):
                    return (.right, true)          // 90 degrees clockwise
                case (0, 1.0, 1.0, 0):
                    return (.rightMirrored, true)  // 90 degrees clockwise, mirrored
                case (0, -1.0, 1.0, 0):
                    return (.left, true)           // 90 degrees counter-clockwise
                case (0, -1.0, -1.0, 0):
                    return (.leftMirrored, true)   // 90 degrees counter-clockwise, mirrored
                case (-1.0, 0, 0, -1.0):
                    return (.down, false)          // rotated 180 degrees
                default:
                    return (.up, false)            // identity or anything unrecognized
                }
        }

    /// Concatenates the given assets into a single 1080x1920 composition and exports it.
    ///
    /// - Parameters:
    ///   - arrayVideos: Assets appended back-to-back; each must contain a video track
    ///     (audio tracks are optional).
    ///   - completion: Invoked exactly once, with the exported file URL on success or
    ///     an error (possibly nil for setup failures with no underlying Error) otherwise.
    func mergeVideosTestSQ(arrayVideos: [AVAsset], completion: @escaping (URL?, Error?) -> ()) {

            let mixComposition = AVMutableComposition()

            var instructions: [AVMutableVideoCompositionLayerInstruction] = []
            var insertTime = CMTime.zero

            /// For each asset, append its video (and, when present, audio) track to the composition.
            for sourceAsset in arrayVideos {

                let frameRange = CMTimeRange(start: .zero, duration: sourceAsset.duration)

                guard
                    let nthVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
                    let assetVideoTrack = sourceAsset.tracks(withMediaType: .video).first
                else {
                    // Bug fix: the original returned silently here, so the caller's
                    // completion handler never fired.
                    completion(nil, nil)
                    return
                }

                do {
                    try nthVideoTrack.insertTimeRange(frameRange, of: assetVideoTrack, at: insertTime)

                    // Bug fix: the original force-unwrapped the audio track, crashing on
                    // clips with no audio. Only add an audio track when one exists.
                    if let assetAudioTrack = sourceAsset.tracks(withMediaType: .audio).first,
                       let nthAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
                        try nthAudioTrack.insertTimeRange(frameRange, of: assetAudioTrack, at: insertTime)
                    }

                    // Hide this clip once its segment ends so the next clip shows through.
                    let nthInstruction = MainCamVC.videoCompositionInstruction(nthVideoTrack, asset: sourceAsset)
                    nthInstruction.setOpacity(0.0, at: CMTimeAdd(insertTime, sourceAsset.duration))

                    instructions.append(nthInstruction)
                    insertTime = CMTimeAdd(insertTime, sourceAsset.duration)

                } catch {
                    // Bug fix: the original only printed here and kept looping; report the
                    // failure to the caller instead.
                    completion(nil, error)
                    return
                }
            }

            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRange(start: .zero, duration: insertTime)
            mainInstruction.layerInstructions = instructions

            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
            mainComposition.renderSize = CGSize(width: 1080, height: 1920)

            // Bug fix: the file was named .mp4 while the export type was .mov; keep them consistent.
            let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("merge.mov")

            // Remove any previous export so the session can write the new file.
            try? FileManager.default.removeItem(at: outputFileURL)

            /// Start the export session. NOTE(review): a custom videoComposition forces a
            /// re-encode, which is why AVAssetExportPresetPassthrough does not work here.
            guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
                // Bug fix: the original silently dropped the completion when session
                // creation failed.
                completion(nil, nil)
                return
            }
            exportSession.outputFileType = .mov
            exportSession.outputURL = outputFileURL
            exportSession.videoComposition = mainComposition
            exportSession.shouldOptimizeForNetworkUse = true

            exportSession.exportAsynchronously {
                // Bug fix: the original checked outputURL and error independently, so a
                // failed export could invoke completion twice (outputURL is always set).
                // Switching on status guarantees exactly one callback.
                switch exportSession.status {
                case .completed:
                    completion(exportSession.outputURL, nil)
                default:
                    completion(nil, exportSession.error)
                }
            }
        }

Note I have instructions for preserving the correct directions.

Thanks for the help! I just need it to be faster — right now it takes roughly videoDuration / 2 seconds to export.


Source link