【问题标题】:How do I merge 2 videos with a fade transition?如何合并 2 个带有淡入淡出过渡的视频?
【发布时间】:2019-10-15 03:44:07
【问题描述】:

我正在尝试两个重叠 2 秒的合并 2 视频。在这种重叠中,我想淡入第二个视频(或淡出第一个视频以显示第二个,任何一个都很棒)。

第一个视频按预期在结束前 2 秒淡出,但随着它的淡出,我得到一个黑屏,而不是第二个视频淡入。在视频 1 结束时,视频 2 显示在其淡入的一半在动画中。

我看不到它们重叠的轨道有什么问题?下面是我的代码

func setupVideo() {
        
    let url = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
    let assetOne = AVAsset(url: url)
    
    let urlTwo = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoThree", ofType: "mp4")!)
    let assetTwo = AVAsset(url: urlTwo)
            
    let mixComposition = AVMutableComposition()
    var instructions = [AVMutableVideoCompositionLayerInstruction]()
    var mainInstructionList = [AVMutableVideoCompositionInstruction]()
    var lastTime = CMTime.zero
    
    // Create Track One

    guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }
    
    // Setup AVAsset 1

    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetOne.duration)
    
    do {
        try videoTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .video)[0], at: lastTime)
        
        try audioTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .audio)[0], at: lastTime)
        
    } catch {
        print(error)
    }
    
    // Setup Layer Instruction 1

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

    let duration = CMTime(seconds: 2, preferredTimescale: 60)
    let transitTime = CMTime(seconds: 2, preferredTimescale: 60)
    let insertTime = CMTimeSubtract(assetOne.duration, transitTime)
    let instRange = CMTimeRangeMake(start: insertTime, duration: duration)
    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: instRange)
    instructions.append(layerInstruction)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: lastTime, duration: assetOne.duration)
    mainInstruction.layerInstructions = instructions

    mainInstructionList.append(mainInstruction)

    lastTime = CMTimeAdd(lastTime, assetOne.duration)
    
    
    // Create Track One

    guard let videoTrackTwo = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrackTwo = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }
    
    // Setup AVAsset 2

    let transitionTime = CMTime(seconds: 2, preferredTimescale: 60)
    let newLastTime = CMTimeSubtract(assetOne.duration, transitionTime)

    let timeRangeTwo = CMTimeRangeMake(start: CMTime.zero, duration: assetTwo.duration)

    do {
        try videoTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .video)[0], at: newLastTime)

        try audioTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .audio)[0], at: newLastTime)

    } catch {
       print(error)
    }
    
    
    // Setup Layer Instruction 2

    let layerInstructionTwo = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrackTwo)
    let durationTwo = CMTime(seconds: 4, preferredTimescale: 60)
    let instRangeTwo = CMTimeRangeMake(start: newLastTime, duration: durationTwo)
    layerInstructionTwo.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: instRangeTwo)
    instructions.append(layerInstructionTwo)

    let mainInstructionTwo = AVMutableVideoCompositionInstruction()
    mainInstructionTwo.timeRange = CMTimeRangeMake(start: lastTime, duration: assetTwo.duration)
    mainInstructionTwo.layerInstructions = instructions
    
    mainInstructionList.append(mainInstructionTwo)
    
    // Setup Video Composition

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = mainInstructionList
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
    mainComposition.renderSize = videoTrack.naturalSize
            
    let item = AVPlayerItem(asset: mixComposition)
    item.videoComposition = mainComposition
    
    player = AVPlayer(playerItem: item)
    
    let playerLayer: AVPlayerLayer = {
        let layer = AVPlayerLayer(player: player)
        layer.videoGravity = .resizeAspectFill
        return layer
    }()
    
    let playerWidth: CGFloat = UIScreen.main.bounds.size.width
    let videoHeight = UIScreen.main.bounds.size.width * 9 / 16
            
    playerLayer.frame = CGRect(x: 0, y: 0, width: playerWidth, height: videoHeight)
    self.layer.addSublayer(playerLayer)
}

【问题讨论】:

    标签: swift avfoundation avplayer avasset avmutablecomposition


    【解决方案1】:

    不要为每个创建 AVMutableVideoCompositionInstruction 视频轨道,不要每次都给它分配指令。相反,只需在创建 AVMutableVideoComposition 之前创建一次 AVMutableVideoCompositionInstruction,然后为其分配指令。

    这是你想要的代码。

    func setupVideo() {
    
        let url = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
        let assetOne = AVAsset(url: url)
    
        let urlTwo = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
        let assetTwo = AVAsset(url: urlTwo)
    
        let mixComposition = AVMutableComposition()
        var instructions = [AVMutableVideoCompositionLayerInstruction]()
        var lastTime = CMTime.zero
    
        // Create Track One
    
        guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
            return
        }
    
        // Setup AVAsset 1
    
        let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetOne.duration)
    
        do {
            try videoTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .video)[0], at: lastTime)
    
            try audioTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .audio)[0], at: lastTime)
    
        } catch {
            print(error)
        }
    
        // Setup Layer Instruction 1
    
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    
        let duration = CMTime(seconds: 2, preferredTimescale: 60)
        let transitTime = CMTime(seconds: 2, preferredTimescale: 60)
        let insertTime = CMTimeSubtract(assetOne.duration, transitTime)
        let instRange = CMTimeRangeMake(start: insertTime, duration: duration)
        layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: instRange)
        instructions.append(layerInstruction)
    
        lastTime = CMTimeAdd(lastTime, assetOne.duration)
    
    
        // Create Track One
    
        guard let videoTrackTwo = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrackTwo = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
            return
        }
    
        // Setup AVAsset 2
    
        let transitionTime = CMTime(seconds: 2, preferredTimescale: 60)
        let newLastTime = CMTimeSubtract(assetOne.duration, transitionTime)
    
        let timeRangeTwo = CMTimeRangeMake(start: CMTime.zero, duration: assetTwo.duration)
    
        do {
            try videoTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .video)[0], at: newLastTime)
    
            try audioTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .audio)[0], at: newLastTime)
    
        } catch {
           print(error)
        }
    
        // Setup Layer Instruction 2
    
        let layerInstructionTwo = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrackTwo)
        layerInstructionTwo.setOpacity(1.0, at: newLastTime)
        instructions.append(layerInstructionTwo)
    
        // Setup Video Composition
    
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: CMTimeAdd(newLastTime, assetTwo.duration))
        mainInstruction.layerInstructions = instructions
    
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
        mainComposition.renderSize = videoTrack.naturalSize
    
        let item = AVPlayerItem(asset: mixComposition)
        item.videoComposition = mainComposition
    
        player = AVPlayer(playerItem: item)
    
        let playerLayer: AVPlayerLayer = {
            let layer = AVPlayerLayer(player: player)
            layer.videoGravity = .resizeAspectFill
            return layer
        }()
    
        let playerWidth: CGFloat = UIScreen.main.bounds.size.width
        let videoHeight = UIScreen.main.bounds.size.width * 9 / 16
    
        playerLayer.frame = CGRect(x: 0, y: 0, width: playerWidth, height: videoHeight)
        self.layer.addSublayer(playerLayer)
    }
    

    【讨论】:

      猜你喜欢
      • 2021-10-01
      • 1970-01-01
      • 2015-11-22
      • 2012-02-04
      • 1970-01-01
      • 2021-12-13
      • 2013-01-03
      • 2015-01-17
      • 2020-03-29
      相关资源
      最近更新 更多