【Question】: Remove audio from a video file
【Posted】: 2023-07-16 05:01:01
【Description】:

I'm trying to remove the audio track from MOV-type videos in the phone's library. I know I could just mute the audio during playback, but since I plan to upload users' videos, it makes sense to strip the audio out and reduce the file size.

I tried converting the Obj-C code from THIS ANSWER to Swift, but either I botched the conversion or it simply doesn't remove the audio from the file.

Any help would be greatly appreciated.
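
To get a feel for how much the audio actually contributes before stripping it, a quick check like this prints each track's estimated data rate (just a sketch; `videoURL` stands in for the MOV's file URL, and it uses the older synchronous track APIs):

import AVFoundation

let asset = AVURLAsset(url: videoURL)
for track in asset.tracks {
    // estimatedDataRate is in bits per second.
    print("\(track.mediaType.rawValue): \(track.estimatedDataRate / 1_000_000) Mbps, \(track.totalSampleDataLength) bytes")
}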

【Comments】:

  • Did you ever find a solution?

Tags: ios swift audio video avfoundation


【Solution 1】:

The top-voted answer didn't work for me, and on top of that I ran into video rotation issues. I ended up modifying my video compression method by adding a muteSound: Bool parameter.

I use a bit rate of 2300000 so the video still looks good while ending up 3-4x smaller than the original.
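
At that rate, a 60-second clip works out to roughly 2,300,000 bps × 60 s ÷ 8 ≈ 17 MB of video data (before audio and container overhead), so the savings add up quickly on longer uploads.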

I'll keep the question open and mark the most-voted answer as correct. Maybe someone can come up with something simpler.

func compressVideo(inputURL: URL, outputURL: URL, bitRate: Int, muteSound: Bool, onDone: @escaping () -> Void) {
    let videoAsset = AVURLAsset(url: inputURL)
    let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
    let videoSize = videoTrack.naturalSize
    let videoWriterCompressionSettings = [
        AVVideoAverageBitRateKey: bitRate
    ]

    let videoWriterSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoCompressionPropertiesKey: videoWriterCompressionSettings,
        AVVideoWidthKey: Int(videoSize.width),
        AVVideoHeightKey: Int(videoSize.height)
    ]

    let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoWriterSettings)
    videoWriterInput.expectsMediaDataInRealTime = true
    // Carry the source orientation over so the output is not rotated.
    videoWriterInput.transform = videoTrack.preferredTransform
    let videoWriter = try! AVAssetWriter(outputURL: outputURL, fileType: .mov)
    videoWriter.add(videoWriterInput)

    let videoReaderSettings: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
    ]

    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
    let videoReader = try! AVAssetReader(asset: videoAsset)
    videoReader.add(videoReaderOutput)

    let audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
    audioWriterInput.expectsMediaDataInRealTime = false
    videoWriter.add(audioWriterInput)

    // Assumes the asset has an audio track; use `.first` with a guard if
    // your inputs may already be silent.
    let audioTrack = videoAsset.tracks(withMediaType: .audio)[0]
    let audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
    let audioReader = try! AVAssetReader(asset: videoAsset)
    if muteSound == false {
        audioReader.add(audioReaderOutput)
    }
    videoWriter.startWriting()

    videoReader.startReading()
    videoWriter.startSession(atSourceTime: .zero)
    let processingQueue = DispatchQueue(label: "processingQueue1")
    videoWriterInput.requestMediaDataWhenReady(on: processingQueue) {
        while videoWriterInput.isReadyForMoreMediaData {
            let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
            if videoReader.status == .reading && sampleBuffer != nil {
                videoWriterInput.append(sampleBuffer!)
            } else {
                videoWriterInput.markAsFinished()
                if videoReader.status == .completed {
                    if muteSound {
                        // Finish here without copying a single audio sample.
                        videoWriter.finishWriting {
                            onDone()
                        }
                    } else {
                        audioReader.startReading()
                        videoWriter.startSession(atSourceTime: .zero)
                        let processingQueue = DispatchQueue(label: "processingQueue2")

                        audioWriterInput.requestMediaDataWhenReady(on: processingQueue) {
                            while audioWriterInput.isReadyForMoreMediaData {
                                let sampleBuffer = audioReaderOutput.copyNextSampleBuffer()
                                if audioReader.status == .reading && sampleBuffer != nil {
                                    audioWriterInput.append(sampleBuffer!)
                                } else {
                                    audioWriterInput.markAsFinished()
                                    if audioReader.status == .completed {
                                        videoWriter.finishWriting {
                                            onDone()
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
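
For reference, a call site might look something like this (a sketch; `sourceURL` is assumed to be the URL of the picked video, and the output lands in the temporary directory):

let outputURL = FileManager.default.temporaryDirectory
    .appendingPathComponent("muted-\(UUID().uuidString).mov")
compressVideo(inputURL: sourceURL, outputURL: outputURL,
              bitRate: 2_300_000, muteSound: true) {
    // finishWriting completes on a background queue; hop to main before
    // touching UI or starting the upload.
    DispatchQueue.main.async {
        print("Soundless video written to \(outputURL)")
    }
}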

【Comments】:

    【Solution 2】:

    You can try this. It builds an AVMutableComposition containing only the source's video track, so the exported movie simply ends up with no audio:

    NSString * initPath1 = VideoLocalPath;
    AVMutableComposition *composition = [AVMutableComposition composition];
    
    NSString *inputVideoPath = initPath1;
    
    AVURLAsset * sourceAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:inputVideoPath] options:nil];
    
    // Add a video track only -- no audio track is ever inserted into the composition.
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    
    BOOL ok = NO;
    AVAssetTrack * sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    
    CMTimeRange x = CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]);
    
    ok = [compositionVideoTrack insertTimeRange:x ofTrack:sourceVideoTrack atTime:kCMTimeZero error:nil];
    
    // Delete the original so the export session can write back to the same path.
    if([[NSFileManager defaultManager] fileExistsAtPath:initPath1]) {
        [[NSFileManager defaultManager] removeItemAtPath:initPath1 error:nil];
    }
    
    NSURL *url = [[NSURL alloc] initFileURLWithPath: initPath1];
    
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie; // "com.apple.quicktime-movie"
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        [self savefinalVideoFileToDocuments:exporter.outputURL];
    }];
    
    
    -(void)savefinalVideoFileToDocuments:(NSURL *)url {
        // Note: this writes the movie data to a file literally named "Videos";
        // append a file name if a folder was intended.
        NSString *storePath = [[self applicationCacheDirectory] stringByAppendingPathComponent:@"Videos"];
        NSData * movieData = [NSData dataWithContentsOfURL:url];
        [movieData writeToFile:storePath atomically:YES];
    }
    
    - (NSString *)applicationCacheDirectory {
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
        return documentsDirectory;
    }
    

    Swift:

    let initPath1 = VideoLocalPath
    let composition = AVMutableComposition()
    let inputVideoPath = initPath1
    let sourceAsset = AVURLAsset(url: URL(fileURLWithPath: inputVideoPath))
    // Add a video track only -- no audio track is ever inserted into the composition.
    let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    let sourceVideoTrack = sourceAsset.tracks(withMediaType: .video)[0]
    let x = CMTimeRange(start: .zero, duration: sourceAsset.duration)
    do {
        try compositionVideoTrack?.insertTimeRange(x, of: sourceVideoTrack, at: .zero)
    } catch {
        print("Could not insert video track: \(error)")
    }
    // Delete the original so the export session can write back to the same path.
    if FileManager.default.fileExists(atPath: initPath1) {
        try? FileManager.default.removeItem(atPath: initPath1)
    }
    let url = URL(fileURLWithPath: initPath1)
    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = url
    exporter?.outputFileType = .mov
    exporter?.exportAsynchronously {
        if let outputURL = exporter?.outputURL {
            self.savefinalVideoFile(toDocuments: outputURL)
        }
    }
    
    
    func savefinalVideoFile(toDocuments url: URL) {
        // Note: this writes the movie data to a file literally named "Videos";
        // append a file name if a folder was intended.
        let storeURL = URL(fileURLWithPath: self.applicationCacheDirectory()).appendingPathComponent("Videos")
        if let movieData = try? Data(contentsOf: url) {
            try? movieData.write(to: storeURL, options: .atomic)
        }
    }
    
    func applicationCacheDirectory() -> String {
        let paths = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true)
        return paths.first ?? ""
    }
    

    【Comments】:

    • This looks like exactly the same code I referenced in the question (I haven't checked it word for word). I need a Swift version, since I tried converting it to Swift myself and failed.
    • Thanks for the conversion... I'll give it a try now and get back to you.
    • Sorry, there were too many errors when I copied the code over, but it did get me closer to something workable.

    【Solution 3】:

    This is what I use in my project. It works for portrait videos and can also be adjusted for landscape ones.

        func removeSound(withFileURLs videoFileURL: URL, completion: @escaping (_ mergedVideoURL: URL?, _ error: Error?) -> Void) {
    
            let composition = AVMutableComposition()
            guard let videoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
                completion(nil, videoTrackError())
                return
            }
            var instructions = [AVVideoCompositionInstructionProtocol]()
            var isError = false
            var currentTime: CMTime = CMTime.zero
            var videoSize = CGSize.zero
            var highestFrameRate = 0
    
            let options = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
            let asset = AVURLAsset(url: videoFileURL, options: options)
            guard let videoAsset: AVAssetTrack = asset.tracks(withMediaType: .video).first else {
                completion(nil, videoTrackError())
                return
            }
            if videoSize.equalTo(CGSize.zero) {
                videoSize = videoAsset.naturalSize
            }
            var isVideoAssetPortrait_ = false
            let videoTransform: CGAffineTransform = videoAsset.preferredTransform

            // A preferredTransform of +/-90 degrees means the clip was recorded in portrait.
            if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
                isVideoAssetPortrait_ = true
            }
            if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
                isVideoAssetPortrait_ = true
            }

            var videoAssetWidth: CGFloat = videoAsset.naturalSize.width
            var videoAssetHeight: CGFloat = videoAsset.naturalSize.height
            if isVideoAssetPortrait_ {
                videoAssetWidth = videoAsset.naturalSize.height
                videoAssetHeight = videoAsset.naturalSize.width
            }
            videoSize.height = videoAssetHeight
            videoSize.width = videoAssetWidth

            let currentFrameRate = Int(roundf(videoAsset.nominalFrameRate))
            highestFrameRate = (currentFrameRate > highestFrameRate) ? currentFrameRate : highestFrameRate
            let timeRange: CMTimeRange = CMTimeRangeMake(start: CMTime.zero, duration: asset.duration)
            do {
                // Copy only the video track into the composition; the audio is never added.
                try videoTrack.insertTimeRange(timeRange, of: videoAsset, at: currentTime)
                let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
                videoCompositionInstruction.timeRange = CMTimeRangeMake(start: currentTime, duration: timeRange.duration)
                let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

                var isVideoAssetPortrait_ = false
                let videoTransform: CGAffineTransform = videoAsset.preferredTransform
                var videoAssetOrientation_: UIImage.Orientation = .up
                if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 {
                    videoAssetOrientation_ = .right
                    isVideoAssetPortrait_ = true
                }
                if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 {
                    videoAssetOrientation_ = .left
                    isVideoAssetPortrait_ = true
                }
                if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 {
                    videoAssetOrientation_ = .up
                }
                if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 {
                    videoAssetOrientation_ = .down
                }

                var videoAssetWidth: CGFloat = videoAsset.naturalSize.width
                var videoAssetHeight: CGFloat = videoAsset.naturalSize.height
                if isVideoAssetPortrait_ {
                    videoAssetWidth = videoAsset.naturalSize.height
                    videoAssetHeight = videoAsset.naturalSize.width
                }
                // Center the track inside the render size, scaling to fill the
                // smaller margin.
                var tx: Int = 0
                if videoSize.width - videoAssetWidth != 0 {
                    tx = Int((videoSize.width - videoAssetWidth) / 2)
                }
                var ty: Int = 0
                if videoSize.height - videoAssetHeight != 0 {
                    ty = Int((videoSize.height - videoAssetHeight) / 2)
                }
                var scale = CGAffineTransform(scaleX: 1, y: 1)
                var factor: CGFloat = 1.0
                if tx != 0 && ty != 0 {
                    if tx <= ty {
                        factor = videoSize.width / videoAssetWidth
                        scale = CGAffineTransform(scaleX: factor, y: factor)
                        tx = 0
                        ty = Int((videoSize.height - videoAssetHeight * factor) / 2)
                    }
                    if tx > ty {
                        factor = videoSize.height / videoAssetHeight
                        scale = CGAffineTransform(scaleX: factor, y: factor)
                        ty = 0
                        tx = Int((videoSize.width - videoAssetWidth * factor) / 2)
                    }
                }

                // Rotate and translate according to the recorded orientation.
                var move: CGAffineTransform!
                var transform: CGAffineTransform!
                switch videoAssetOrientation_ {
                case UIImage.Orientation.right:
                    move = CGAffineTransform(translationX: (videoAssetWidth * factor) + CGFloat(tx), y: CGFloat(ty))
                    transform = CGAffineTransform(rotationAngle: degreeToRadian(90))
                    layerInstruction.setTransform(transform.concatenating(scale.concatenating(move)), at: .zero)
                case UIImage.Orientation.left:
                    move = CGAffineTransform(translationX: CGFloat(tx), y: videoSize.height - CGFloat(ty))
                    transform = CGAffineTransform(rotationAngle: degreeToRadian(270))
                    layerInstruction.setTransform(transform.concatenating(scale.concatenating(move)), at: .zero)
                case UIImage.Orientation.up:
                    move = CGAffineTransform(translationX: CGFloat(tx), y: CGFloat(ty))
                    layerInstruction.setTransform(scale.concatenating(move), at: .zero)
                case UIImage.Orientation.down:
                    move = CGAffineTransform(translationX: videoSize.width + CGFloat(tx), y: (videoAssetHeight * factor) + CGFloat(ty))
                    transform = CGAffineTransform(rotationAngle: degreeToRadian(180))
                    layerInstruction.setTransform(transform.concatenating(scale.concatenating(move)), at: .zero)
                default:
                    break
                }

                videoCompositionInstruction.layerInstructions = [layerInstruction]
                instructions.append(videoCompositionInstruction)
                currentTime = CMTimeAdd(currentTime, timeRange.duration)
            } catch {
                print("Unable to load data: \(error)")
                isError = true
                completion(nil, error)
            }
    
            if isError == false {
                let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
                let strFilePath: String = generateMergedVideoFilePath()
                try? FileManager.default.removeItem(atPath: strFilePath)
                exportSession?.outputURL = URL(fileURLWithPath: strFilePath)
                exportSession?.outputFileType = .mp4
                exportSession?.shouldOptimizeForNetworkUse = true
                let mutableVideoComposition = AVMutableVideoComposition.init()
                mutableVideoComposition.instructions = instructions
                mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: Int32(highestFrameRate))
                mutableVideoComposition.renderSize = videoSize
                exportSession?.videoComposition = mutableVideoComposition
                let exportCompletion: (() -> Void) = {() -> Void in
                    DispatchQueue.main.async(execute: {() -> Void in
                        completion(exportSession?.outputURL, exportSession?.error)
                    })
                }
                if let exportSession = exportSession {
                    exportSession.exportAsynchronously(completionHandler: {() -> Void in
                        switch exportSession.status {
                        case .completed:
                            print("Successfully merged")
                            exportCompletion()
                        case .failed:
                            print("Failed")
                            exportCompletion()
                        case .cancelled:
                            print("Cancelled")
                            exportCompletion()
                        case .unknown:
                            print("Unknown")
                        case .exporting:
                            print("Exporting")
                        case .waiting:
                            print("Waiting")
                        @unknown default:
                            break
                        }
    
                    })
                }
            }
        }
    
        func videoTrackError() -> Error {
            let userInfo: [String: Any] = [
                NSLocalizedDescriptionKey: NSLocalizedString("error", value: "Provide correct video file", comment: ""),
                NSLocalizedFailureReasonErrorKey: NSLocalizedString("error", value: "No video track available", comment: "")
            ]
            return NSError(domain: "VideoMerger", code: 404, userInfo: userInfo)
        }
    
        func generateMergedVideoFilePath() -> String {
            return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
                .appendingPathComponent("\(UUID().uuidString)-soundlessVideo.mp4").path
        }
        func degreeToRadian(_ degree: CGFloat) -> CGFloat {
            return (.pi * degree / 180.0)
        }
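
    A call might look something like this (a quick sketch; `sourceURL` is assumed to be the file URL of the source MOV):

        removeSound(withFileURLs: sourceURL) { mergedVideoURL, error in
            if let url = mergedVideoURL {
                print("Soundless video written to \(url)")
            } else if let error = error {
                print("Export failed: \(error.localizedDescription)")
            }
        }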
    

    【Comments】:

      【Solution 4】:

      In Swift 3:

      func removeAudioFromVideo(_ videoPath: String) {
         let initPath1: String = videoPath
         let composition = AVMutableComposition()
         let inputVideoPath: String = initPath1
         let sourceAsset = AVURLAsset(url: URL(fileURLWithPath: inputVideoPath), options: nil)
         // Add a video track only -- no audio track is ever inserted into the composition.
         let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
         let sourceVideoTrack: AVAssetTrack? = sourceAsset.tracks(withMediaType: AVMediaTypeVideo).first
         let x: CMTimeRange = CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)
         _ = try? compositionVideoTrack!.insertTimeRange(x, of: sourceVideoTrack!, at: kCMTimeZero)
         // Delete the original so the export session can write back to the same path.
         if FileManager.default.fileExists(atPath: initPath1) {
             try? FileManager.default.removeItem(atPath: initPath1)
         }
         let url = URL(fileURLWithPath: initPath1)
         let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
         exporter?.outputURL = url
         exporter?.outputFileType = "com.apple.quicktime-movie"
         exporter?.exportAsynchronously(completionHandler: {() -> Void in
             self.saveFinalVideoFile(toDocuments: exporter!.outputURL!)
         })
      }
      
      func saveFinalVideoFile(toDocuments url: URL) {
         // Note: this writes the movie data to a file literally named "Videos"
         // in Documents; append a real file name if a folder was intended.
         let fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false).appendingPathComponent("Videos")
         let movieData = try? Data(contentsOf: url)
         try? movieData?.write(to: fileURL, options: .atomic)
      }
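
      Usage is then just (a sketch; the path below is only an example, and note that the function deletes and re-exports the file at that path in place):

         removeAudioFromVideo(NSTemporaryDirectory() + "example.mov")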
      

      【Comments】: