Home / IOS Development / ios – Making AVMutableComposition with image and video in aspect fill

ios – Making AVMutableComposition with image and video in aspect fill



7;m trying to make a new video using an image that will always be in size: CGSize (375, 667), but with a video that will be in different sizes, and with .aspectFit ‘content mode. The problem is that I can not figure out how to make the entire video composition the right size (ie the image size), and instead it is the natural size of the video with a number of weird results. (edit note: the video should be centered in the view as a normal aspectFit would do for a UIImageView for example ..)

Anyway, it’s an example of what I’m trying to achieve … note that I already have the photo and video, all I have to do is make the new video with them. And this is what it should look like:

enter image description here

Here’s the code I’m currently trying, with a placeholder image of “background” (a random image of 375, 667 in assets ..): I think I can do things around the comment “important things” wrong … but I can not find out of it at the moment: /

  func makeBirthdayCard(fromVideoAt videoURL: URL, forName name: String, onComplete: @escaping (URL?) -> Void) {
    print(videoURL)
    let asset = AVURLAsset(url: videoURL)
    let composition = AVMutableComposition()
    
    guard
      let compositionTrack = composition.addMutableTrack(
        withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
      let assetTrack = asset.tracks(withMediaType: .video).first
      else {
        print("Something is wrong with the asset.")
        onComplete(nil)
        return
    }
    
    do {
      let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
      try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)
      
      if let audioAssetTrack = asset.tracks(withMediaType: .audio).first,
        let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
        try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
      }
    } catch {
      print(error)
      onComplete(nil)
      return
    }
    
    compositionTrack.preferredTransform = assetTrack.preferredTransform
    let videoInfo = orientation(from: assetTrack.preferredTransform)
    
    
    
    //Important stuff? general below:
    
    let videoSize: CGSize
    if videoInfo.isPortrait {
      videoSize = CGSize(width: 720, height: 1280)
    } else {
      videoSize = CGSize(width: 720, height: 1280) //720.0, 1280 tiktok default..?
    }
    
    
    
    //Background image:
    let backgroundLayer = CALayer()
    backgroundLayer.frame = CGRect(origin: .zero, size: videoSize) //videosize
    
    backgroundLayer.contents = UIImage(named: "background")?.cgImage
    backgroundLayer.contentsGravity = .resizeAspectFill
    backgroundLayer.backgroundColor = UIColor.red.cgColor

    //Video layer:
    let videoLayer = CALayer()
//    videoLayer.frame = CGRect(origin: .zero, size: CGSize(width: composition.naturalSize.width, height: composition.naturalSize.height)) //videosize
    videoLayer.backgroundColor = UIColor.yellow.cgColor
        
    print(composition.naturalSize, "<-- composition.naturalSize")
    videoLayer.frame = CGRect(origin: .zero, size: CGSize(width: videoSize.width, height: composition.naturalSize.height))//CGRect(x: 0, y: 0, width: videoSize.width, height: composition.naturalSize.height)

    
    //OutPutlayer putting thme together?
    let outputLayer = CALayer()
    outputLayer.frame = CGRect(origin: .zero, size: CGSize(width: 720, height: 1280)) //videosize
    outputLayer.backgroundColor = UIColor.white.cgColor
    outputLayer.addSublayer(backgroundLayer)
    outputLayer.addSublayer(videoLayer)
//    outputLayer.addSublayer(overlayLayer)
    
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = videoSize
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)
    
    
    //Setting Up Instructiosn
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
    videoComposition.instructions = [instruction]
    let layerInstruction = compositionLayerInstruction(for: compositionTrack, assetTrack: assetTrack)
    instruction.layerInstructions = [layerInstruction]
    
    
    
    //EXPORT
    guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Cannot create export session.")
        onComplete(nil)
        return
    }
    
    let videoName = UUID().uuidString
    let exportURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(videoName).appendingPathExtension("mp4")
    
    export.videoComposition = videoComposition
    export.outputFileType = .mov
    export.outputURL = exportURL
    
    export.exportAsynchronously {
      DispatchQueue.main.async {
        switch export.status {
        case .completed:
          onComplete(exportURL)
        default:
          print("Something went wrong during export.")
          print(export.error ?? "unknown error")
          onComplete(nil)
          break
        }
      }
    }
  }


Source link