
Extend video render size and add background (iOS, Swift)

I followed this tutorial and built an app that takes a video and adds a square background, so the video's render size becomes a square. But when I render, the output contains a portion of black screen. I've attached a screenshot and my code. Can someone help me with this?

[screenshot: the rendered square video showing the black area]

My code:

```swift
// Requires: import AVFoundation, Photos, UIKit
func videoProcess() {
    asset = AVURLAsset(url: videoAssetUrl!)
    // Make the video composition track.
    guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
          let assetTrack = asset?.tracks(withMediaType: .video).first else {
        return
    }

    do {
        // Insert the full time range into the composition.
        let timeRange = CMTimeRange(start: .zero, duration: asset!.duration)
        try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)

        // Extract the audio track, if there is one.
        if let audioAssetTrack = asset?.tracks(withMediaType: .audio).first,
           let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
        }
    } catch {
        print(error)
    }

    // Composition transformation.
    compositionTrack.preferredTransform = assetTrack.preferredTransform
    //let videoInfo = orientation(from: assetTrack.preferredTransform)
    let videoSize = assetTrack.naturalSize

    //let videoSize: CGSize
    //if videoInfo.isPortrait {
    //    videoSize = CGSize(
    //        width: assetTrack.naturalSize.height,
    //        height: assetTrack.naturalSize.width)
    //} else {
    //    videoSize = assetTrack.naturalSize
    //}

    // Square background: sides equal to the longer side of the video.
    //let videoSize = getVideoSize(assetUrl: videoAssetUrl!)
    var backSize: CGSize = .zero
    if videoSize.width >= videoSize.height {
        backSize = CGSize(width: videoSize.width, height: videoSize.width)
    } else {
        backSize = CGSize(width: videoSize.height, height: videoSize.height)
    }

    // Add layers: background fills the square, video is centered on top.
    let backgroundLayer = CALayer()
    backgroundLayer.frame = CGRect(origin: .zero, size: backSize)
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: CGPoint(x: (backSize.width - videoSize.width) / 2,
                                              y: (backSize.height - videoSize.height) / 2),
                              size: videoSize)

    //videoLayer.backgroundColor = UIColor.green.cgColor
    backgroundLayer.backgroundColor = UIColor.red.cgColor

    //backgroundLayer.contents = UIImage(named: "img")?.cgImage
    //backgroundLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill

    let outputLayer = CALayer()
    outputLayer.frame = CGRect(origin: .zero, size: backSize)
    outputLayer.addSublayer(backgroundLayer)
    outputLayer.addSublayer(videoLayer)

    // Video composition.
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = backSize
    //videoComposition.renderScale = Float(UIScreen.main.scale)
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)

    // Video composition instruction.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
    videoComposition.instructions = [instruction]

    // Layer instruction.
    let layerInstruction = compositionLayerInstruction(for: compositionTrack, assetTrack: assetTrack)
    instruction.layerInstructions = [layerInstruction]

    guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Cannot create export session.")
        return
    }
    //let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    //let fileUrl = paths[0].appendingPathComponent("output").appendingPathExtension("mov")

    let videoName = UUID().uuidString
    let exportURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(videoName)
        .appendingPathExtension("mov")
    export.videoComposition = videoComposition
    export.outputFileType = .mov
    export.outputURL = exportURL

    export.exportAsynchronously {
        DispatchQueue.main.async {
            switch export.status {
            case .completed:
                // Save the exported file to the photo library.
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportURL)
                }) { saved, error in
                    if saved {
                        print("Export success")
                    } else {
                        print("Export failed")
                    }
                }
            default:
                print("Export failed")
            }
        }
    }
}
```
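(For reference, the post doesn't include the `compositionLayerInstruction(for:assetTrack:)` helper that the code calls, and that's a likely source of the black band: if the layer instruction leaves the video frames at the origin of the square render size, the rest of the canvas stays black. With `postProcessingAsVideoLayer:in:`, the entire render-size frame, black included, is drawn into `videoLayer`, so the usual pattern is to make `videoLayer` cover the full output and center the video via the layer instruction's transform instead. A minimal sketch of such a helper, assuming the square `backSize` logic above; this is an illustration, not the poster's actual implementation:)

```swift
// Hedged sketch, not the poster's missing helper: center the track inside
// the square render size via the layer instruction's transform.
// Assumes videoLayer.frame covers the full output (origin: .zero, size: backSize).
private func compositionLayerInstruction(for track: AVCompositionTrack,
                                         assetTrack: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let videoSize = assetTrack.naturalSize
    let side = max(videoSize.width, videoSize.height)
    // Half the leftover space on each axis puts the video in the middle.
    let center = CGAffineTransform(translationX: (side - videoSize.width) / 2,
                                   y: (side - videoSize.height) / 2)
    instruction.setTransform(assetTrack.preferredTransform.concatenating(center), at: .zero)
    return instruction
}
```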

@joysolutioncat Do you still have issues with this?


Hi, I'm actually having a very similar issue to the one above. I'm using one of Ray Wenderlich's old tutorials on how to merge two videos using AVFoundation, and whenever I do that the output video is never centered. The scale/aspect ratio of the video is fine; the video just isn't centered on the screen like it's supposed to be. Any help, please?
Here is my layer instruction code:

```swift
// Requires: import AVFoundation, UIKit
static func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)

    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait {
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: CMTime.zero)
    } else {
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
            .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
        if assetInfo.orientation == .down {
            let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: CMTime.zero)
    }

    return instruction
}
```
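(For what it's worth, the centering step that usually fixes this is: scale the track to fit the render size, then translate by half the leftover space on each axis. Below is a hedged sketch under those assumptions, not the tutorial's code; `renderSize` stands for whatever you assign to `videoComposition.renderSize`. Deriving it from `UIScreen.main.bounds`, as above, ties the export to the device and is often what throws the centering off.)

```swift
// Sketch only: aspect-fit a track into renderSize and center it.
// renderSize should match videoComposition.renderSize (assumption).
static func centeredTransform(for assetTrack: AVAssetTrack, renderSize: CGSize) -> CGAffineTransform {
    let info = orientationFromTransform(assetTrack.preferredTransform)
    // Portrait clips store their natural size rotated, so swap it first.
    let size = info.isPortrait
        ? CGSize(width: assetTrack.naturalSize.height, height: assetTrack.naturalSize.width)
        : assetTrack.naturalSize
    let scale = min(renderSize.width / size.width, renderSize.height / size.height)
    let scaled = CGSize(width: size.width * scale, height: size.height * scale)
    // Half the leftover space on each axis centers the video.
    let translate = CGAffineTransform(translationX: (renderSize.width - scaled.width) / 2,
                                      y: (renderSize.height - scaled.height) / 2)
    return assetTrack.preferredTransform
        .concatenating(CGAffineTransform(scaleX: scale, y: scale))
        .concatenating(translate)
}
```

A single `instruction.setTransform(centeredTransform(for: assetTrack, renderSize: renderSize), at: .zero)` could then replace the per-orientation branches above.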