
Extend video render size and add background (iOS, Swift)

I followed a tutorial and built an app that takes a video and adds a square background: the video's render size is extended to a square. But when I render, the output contains a portion of black screen. I've attached a screenshot and my code below. Can someone help me with this?


My code:

```swift
func videoProcess() {
    asset = AVURLAsset(url: videoAssetUrl!)

    // Make the video composition track
    guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
          let assetTrack = asset?.tracks(withMediaType: .video).first else {
        return
    }

    do {
        // Insert the whole asset into the composition
        let timeRange = CMTimeRange(start: .zero, duration: asset!.duration)
        try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)

        // Extract the audio track, if the asset has one
        if let audioAssetTrack = asset?.tracks(withMediaType: .audio).first,
           let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
            try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
        }
    } catch {
        print(error)
    }

    // Composition transformation
    compositionTrack.preferredTransform = assetTrack.preferredTransform
    //let videoInfo = orientation(from: assetTrack.preferredTransform)
    let videoSize = assetTrack.naturalSize
    // let videoSize: CGSize
    // if videoInfo.isPortrait {
    //     videoSize = CGSize(
    //         width: assetTrack.naturalSize.height,
    //         height: assetTrack.naturalSize.width)
    // } else {
    //     videoSize = assetTrack.naturalSize
    // }

    // Square background size: the longer side of the video
    var backSize: CGSize = .zero
    if videoSize.width >= videoSize.height {
        backSize = CGSize(width: videoSize.width, height: videoSize.width)
    } else {
        backSize = CGSize(width: videoSize.height, height: videoSize.height)
    }

    // Layers: background fills the square, video is centred on top
    let backgroundLayer = CALayer()
    backgroundLayer.frame = CGRect(origin: .zero, size: backSize)
    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: CGPoint(x: (backSize.width - videoSize.width) / 2,
                                              y: (backSize.height - videoSize.height) / 2),
                              size: videoSize)

    //videoLayer.backgroundColor = UIColor.green.cgColor
    backgroundLayer.backgroundColor = UIColor.red.cgColor
    //backgroundLayer.contents = UIImage(named: "img")?.cgImage
    //backgroundLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill

    let outputLayer = CALayer()
    outputLayer.frame = CGRect(origin: .zero, size: backSize)
    outputLayer.addSublayer(backgroundLayer)
    outputLayer.addSublayer(videoLayer)

    // Video composition
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = backSize
    //videoComposition.renderScale = Float(UIScreen.main.scale)
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: outputLayer)

    // Video composition instruction
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
    videoComposition.instructions = [instruction]

    // Layer instruction
    let layerInstruction = compositionLayerInstruction(for: compositionTrack, assetTrack: assetTrack)
    instruction.layerInstructions = [layerInstruction]

    // Export
    guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
        print("Cannot create export session.")
        return
    }

    let videoName = UUID().uuidString
    let exportURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent(videoName)
        .appendingPathExtension("mov")
    export.videoComposition = videoComposition
    export.outputFileType = .mov
    export.outputURL = exportURL

    export.exportAsynchronously {
        DispatchQueue.main.async {
            switch export.status {
            case .completed:
                // Save the exported file to the photo library
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportURL)
                }) { saved, error in
                    if saved {
                        print("Export success")
                    } else {
                        print("Save failed")
                    }
                }
            default:
                print("Export failed")
            }
        }
    }
}
```
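
For anyone comparing notes: a common cause of the leftover black area is that `assetTrack.naturalSize` reports the pre-rotation size, so for a portrait recording the width and height used for `backSize` and for centring `videoLayer` are swapped relative to what the compositor actually draws. A minimal sketch of deriving the displayed size from the track's `preferredTransform` (the helper name is mine):

```swift
import AVFoundation
import CoreGraphics

/// Hypothetical helper: the size a video track occupies once its
/// preferredTransform (e.g. a 90° rotation for portrait footage) is applied.
func displaySize(of track: AVAssetTrack) -> CGSize {
    let transformed = track.naturalSize.applying(track.preferredTransform)
    // The transform can flip a dimension negative; only the magnitude matters.
    return CGSize(width: abs(transformed.width), height: abs(transformed.height))
}
```

Using this size for both `videoSize` and the square `backSize` keeps the layer frames consistent with the rendered pixels, which is essentially what the commented-out `videoInfo.isPortrait` branch above is trying to do.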

@joysolutioncat Do you still have issues with this?


Hi, I'm actually having a very similar issue to the one above. I'm using one of Ray Wenderlich's old tutorials on how to merge two videos using AVFoundation, and whenever I do, the output video is never centered. The scale/aspect ratio of the video is fine; it's just that the video isn't centered on the screen the way it's supposed to be. Any help, please?
Here is my layer instruction code:

```swift
static func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)

    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait {
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: CMTime.zero)
    } else {
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
            .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
        if assetInfo.orientation == .down {
            let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: CMTime.zero)
    }

    return instruction
}
```
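
While you wait for a reply, one observation: the landscape branch above shifts the video by a fixed `UIScreen.main.bounds.width / 2`, which only centres it for one particular render size. A sketch of computing the centring translation from the render size and the scaled track size instead; this assumes the track's `preferredTransform` already maps the frame to a zero origin, as iPhone recordings typically do (the helper name is mine):

```swift
import AVFoundation
import CoreGraphics

/// Hypothetical helper: aspect-fit a track into renderSize and centre it.
func centeredFitTransform(for track: AVAssetTrack, renderSize: CGSize) -> CGAffineTransform {
    // Size after the preferred transform, so portrait footage is handled too.
    let transformed = track.naturalSize.applying(track.preferredTransform)
    let size = CGSize(width: abs(transformed.width), height: abs(transformed.height))

    // Uniform scale that fits the video inside the render area.
    let scale = min(renderSize.width / size.width, renderSize.height / size.height)

    // Translation that centres the scaled video in the render area.
    let tx = (renderSize.width - size.width * scale) / 2
    let ty = (renderSize.height - size.height * scale) / 2

    return track.preferredTransform
        .concatenating(CGAffineTransform(scaleX: scale, y: scale))
        .concatenating(CGAffineTransform(translationX: tx, y: ty))
}
```

With something like this, the whole orientation switch collapses to `instruction.setTransform(centeredFitTransform(for: assetTrack, renderSize: renderSize), at: .zero)`, where `renderSize` is whatever you assign to the video composition.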

@samisays11 Do you still have issues with this?


I have the same issue. Can you help?

```swift
func mergeVideos(_ videos: Array<AVAsset>, inArea area: CGSize, completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {

    // Create an AVMutableComposition. This object will hold our multiple AVMutableCompositionTracks.
    let mixComposition = AVMutableComposition()

    var instructionLayers: Array<AVMutableVideoCompositionLayerInstruction> = []

    for asset in videos {

        // Create an AVMutableCompositionTrack and add it to the AVMutableComposition.
        let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Insert the asset for its full duration at the composition's current end
        // (.zero for the first asset), so each video plays after the previous one.
        if let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first {

            /// Hide time for this video's layer
            let opacityStartTime: CMTime = CMTimeMakeWithSeconds(0, preferredTimescale: asset.duration.timescale)
            let opacityEndTime: CMTime = CMTimeAdd(mixComposition.duration, asset.duration)
            let hideAfter: CMTime = CMTimeAdd(opacityStartTime, opacityEndTime)

            let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
            try? track?.insertTimeRange(timeRange, of: videoTrack, at: mixComposition.duration)

            /// Layer instruction
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track!)
            layerInstruction.setOpacity(0.0, at: hideAfter)

            /// Add logic for aspect fill in the given area
            let properties = scaleAndPositionInAspectFillMode(forTrack: videoTrack, inArea: area)

            /// Check the orientation
            let videoOrientation: UIImage.Orientation = self.getVideoOrientation(forTrack: videoTrack)
            let assetSize = self.assetSize(forTrack: videoTrack)

            if videoOrientation == .down {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi / 2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = 0
                if properties.position.y == 0 {
                    xtranslation = -(assetSize.width - ((area.width / area.height) * assetSize.height)) / 2.0
                } else {
                    ytranslation = assetSize.height - (assetSize.height - ((area.height / area.width) * assetSize.width)) / 2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: .zero)
            } else if videoOrientation == .left {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((area.width / area.height) * assetSize.height)) / 2.0
                } else {
                    ytranslation = assetSize.height - (assetSize.height - ((area.height / area.width) * assetSize.width)) / 2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: .zero)
            } else if videoOrientation == .right {
                /// No need to rotate
                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                let translationTransform = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)

                let finalTransform = scaleTransform.concatenating(translationTransform)
                layerInstruction.setTransform(finalTransform, at: .zero)
            } else {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: CGFloat(Double.pi / 2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = 0
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((area.width / area.height) * assetSize.height)) / 2.0
                } else {
                    ytranslation = -(assetSize.height - ((area.height / area.width) * assetSize.width)) / 2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: .zero)
            }

            instructionLayers.append(layerInstruction)
        }
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRange(start: .zero, duration: mixComposition.duration)
    mainInstruction.layerInstructions = instructionLayers

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTime(value: 1, timescale: 30)
    mainCompositionInst.renderSize = area

    //let url = URL(fileURLWithPath: "/Users/enacteservices/Desktop/final_video.mov")
    let url = self.videoOutputURL
    try? FileManager.default.removeItem(at: url)

    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = url
    exporter?.outputFileType = .mp4
    exporter?.videoComposition = mainCompositionInst
    exporter?.shouldOptimizeForNetworkUse = true
    exporter?.exportAsynchronously(completionHandler: {
        if let anError = exporter?.error {
            completion(anError, nil)
        } else if exporter?.status == .completed {
            completion(nil, url)
        }
    })
}
```
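
For completeness, this is how I call a function with that signature; `firstURL` and `secondURL` are placeholders for your own asset URLs:

```swift
// Hypothetical call site: merge two clips into a 1080×1080 square render area.
let assets = [AVAsset(url: firstURL), AVAsset(url: secondURL)]
mergeVideos(assets, inArea: CGSize(width: 1080, height: 1080)) { error, url in
    if let error = error {
        print("Merge failed: \(error)")
    } else if let url = url {
        print("Merged video written to \(url)")
    }
}
```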