【问题标题】:How to record an audio stream for save it in file / swift 4.2如何录制音频流以将其保存在文件/swift 4.2中
【发布时间】:2019-06-09 21:50:38
【问题描述】:

我正在为 iPhone 创建一个收音机应用程序（用 Swift 4.2 编写），我想添加一个功能：当我按下按钮时，录制收音机（通过 AVPlayer 读取的在线音频流）正在播放的声音，并将其保存到一个文件中。我应该使用哪段代码？

代码在 Swift 4.2 和 Xcode 10.1 中。 我在网上搜索:“如何录制音频流 swift 4.2”、“如何从 AVPlayer swift 4.2 录制音频”,但我找不到答案。

我的代码:

import UIKit
import AVFoundation
import MediaPlayer

class ViewControllerPlayer: UIViewController { 

    /// URL string of the online audio stream to play.
    var URl = "http://link_of_audio_stream"
    var player:AVPlayer?
    var playerItem:AVPlayerItem?
    var playerLayer:AVPlayerLayer?

    /// Maximum recording duration in seconds.
    /// Replaces the unresolved `TimeConstants.recordDuration` identifier that
    /// the original code referenced.
    private let recordDuration: TimeInterval = 60

    override func viewDidLoad() {
        super.viewDidLoad()

        // Guard instead of force-unwrapping: a malformed URL string would
        // have crashed the app with `url!`.
        guard let url = URL(string: URl) else {
            print("Invalid stream URL: \(URl)")
            return
        }
        let playerItem1 = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: playerItem1)
    }

    @IBAction func Play(_ sender: Any) {
        player?.play()
    }

    @IBAction func Pause(_ sender: Any) {
        player?.pause()
    }

    private var audioRecorder: AVAudioRecorder!

    /// Starts an AVAudioRecorder writing an AAC (.m4a) file into Documents.
    ///
    /// NOTE(review): AVAudioRecorder captures the device *microphone*, not
    /// the audio rendered by AVPlayer — recording the stream itself needs a
    /// resource-loader based approach instead.
    /// - Throws: `RecordingServiceError.canNotCreatePath` when the target
    ///   file URL cannot be built.
    func startRecording() throws {
        guard let newFileURL = createURLForNewRecord() else {
            throw RecordingServiceError.canNotCreatePath
        }
        do {
            // The original created an unused `urlString` variable here; removed.
            audioRecorder = try AVAudioRecorder(url: newFileURL,
                                                settings: [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                                                           AVSampleRateKey: 8000,
                                                           AVNumberOfChannelsKey: 1,
                                                           AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
            audioRecorder.delegate = self as? AVAudioRecorderDelegate
            audioRecorder.prepareToRecord()
            audioRecorder.record(forDuration: recordDuration)
        } catch let error {
            print(error)
        }
    }

    /// Stops and releases the recorder.
    func STOPREC1() throws {
        // Optional call: the original `audioRecorder.stop()` on the implicitly
        // unwrapped optional crashed when called before/after a recording.
        audioRecorder?.stop()
        audioRecorder = nil
        print("Recording finished successfully.")
    }

    enum RecordingServiceError: String, Error {
        case canNotCreatePath = "Can not create path for new recording"
    }

    /// Builds a unique, timestamped file URL in the app's Documents folder.
    private func createURLForNewRecord() -> URL? {
        guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
            return nil
        }

        // Filename-safe timestamp (no colons or spaces) instead of the raw
        // `String(describing: Date())` of the original.
        let formatter = DateFormatter()
        formatter.dateFormat = "yyyy-MM-dd_HH-mm-ss"
        let date = formatter.string(from: Date())
        let fullFileName = "Enregistrement radio " + date + ".m4a"
        return appGroupFolderUrl.appendingPathComponent(fullFileName)
    }
}
    extension FileManager {
        /// Returns the app's Documents directory (where recordings are
        /// stored), or `nil` if it cannot be resolved.
        class func getAppFolderURL() -> URL? {
            // `urls(for:in:)` can in principle return an empty array; use
            // `.first` instead of the unchecked `paths[0]` subscript.
            return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first
        }
    }

【问题讨论】:

    标签: ios swift audio stream record


    【解决方案1】:

    经过多次互联网搜索,我找到了解决方案。

    我在 Internet 上的某个地方发现了这个名为 «CachingPlayerItem.swift» 的 Swift 类,它允许录制在线音频流。

    import Foundation
    import AVFoundation
    
    fileprivate extension URL {
        
        /// Returns a copy of this URL whose scheme is replaced by `scheme`,
        /// or `nil` if the URL cannot be decomposed into components.
        func withScheme(_ scheme: String) -> URL? {
            guard var components = URLComponents(url: self, resolvingAgainstBaseURL: false) else {
                return nil
            }
            components.scheme = scheme
            return components.url
        }
        
    }
    
    @objc protocol CachingPlayerItemDelegate {
        
        /// Called when the media file has been fully downloaded.
        @objc optional func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data)
        
        /// Called every time a new portion of data is received.
        @objc optional func playerItem(_ playerItem: CachingPlayerItem, didDownloadBytesSoFar bytesDownloaded: Int, outOf bytesExpected: Int)
        
        /// Called after the initial prebuffering has finished, i.e. the item
        /// is ready to play.
        @objc optional func playerItemReadyToPlay(_ playerItem: CachingPlayerItem)
        
        /// Called when the data being downloaded did not arrive in time to
        /// continue playback.
        @objc optional func playerItemPlaybackStalled(_ playerItem: CachingPlayerItem)
        
        /// Called on a downloading error.
        @objc optional func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error)
        
    }
    
    open class CachingPlayerItem: AVPlayerItem {
        
        /// Resource-loader delegate that downloads the remote file through a
        /// URLSession and streams every received chunk into a file on disk.
        class ResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate, URLSessionDataDelegate, URLSessionTaskDelegate {
            
            var playingFromData = false
            var mimeType: String? // is required when playing from Data
            var session: URLSession?
            var mediaData: Data?
            var response: URLResponse?
            var pendingRequests = Set<AVAssetResourceLoadingRequest>()
            weak var owner: CachingPlayerItem?
            // Destination file for the recording (set in startDataRequest).
            var fileURL: URL!
            var outputStream: OutputStream?
            
            /// The first call kicks off the download; every loading request is
            /// queued and answered as data arrives.
            func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
                
                if playingFromData {
                    
                    // Nothing to load.
                    
                } else if session == nil {
                    
                    // If we're playing from a url, we need to download the file.
                    // We start loading the file on first request only.
                    guard let initialUrl = owner?.url else {
                        fatalError("internal inconsistency")
                    }
    
                    startDataRequest(with: initialUrl)
                }
                
                pendingRequests.insert(loadingRequest)
                processPendingRequests()
                return true
                
            }
            
            /// Opens the URLSession for `url` and an output stream to the
            /// recording file inside the Documents directory.
            func startDataRequest(with url: URL) {
                
                var recordingName = "record.mp3"
                if let recording = owner?.recordingName{
                    recordingName = recording
                }
                
                fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
                    .appendingPathComponent(recordingName)
                let configuration = URLSessionConfiguration.default
                configuration.requestCachePolicy = .reloadIgnoringLocalAndRemoteCacheData
                session = URLSession(configuration: configuration, delegate: self, delegateQueue: nil)
                session?.dataTask(with: url).resume()
                // NOTE(review): `append: true` keeps the bytes of a previous
                // run that used the same file name — confirm this is intended.
                outputStream = OutputStream(url: fileURL, append: true)
                outputStream?.schedule(in: RunLoop.current, forMode: RunLoop.Mode.default)
                outputStream?.open()
                
            }
            
            func resourceLoader(_ resourceLoader: AVAssetResourceLoader, didCancel loadingRequest: AVAssetResourceLoadingRequest) {
                pendingRequests.remove(loadingRequest)
            }
            
            // MARK: URLSession delegate
            
            /// Mirrors each received chunk to the recording file.
            /// NOTE(review): `mediaData` is never appended to here, so the
            /// in-memory buffer consulted by processPendingRequests() and the
            /// final didFinishDownloadingData callback stays empty; the result
            /// of `write` is also unused — verify against the upstream
            /// CachingPlayerItem implementation.
            func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
                let bytesWritten = data.withUnsafeBytes{outputStream?.write($0, maxLength: data.count)}
            }
            
            /// Accepts the response, resets the in-memory buffer and retries
            /// any queued loading requests.
            func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive response: URLResponse, completionHandler: @escaping (URLSession.ResponseDisposition) -> Void) {
                completionHandler(Foundation.URLSession.ResponseDisposition.allow)
                mediaData = Data()
                self.response = response
                processPendingRequests()
            }
            
            /// Reports download completion (or failure) to the owner's delegate.
            func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
                if let errorUnwrapped = error {
                    owner?.delegate?.playerItem?(owner!, downloadingFailedWith: errorUnwrapped)
                    return
                }
                processPendingRequests()
                owner?.delegate?.playerItem?(owner!, didFinishDownloadingData: mediaData!)
            }
            
            // MARK: -
            
            /// Answers every pending loading request that can be satisfied with
            /// the data downloaded so far, then drops it from the queue.
            func processPendingRequests() {
                
                // get all fullfilled requests
                let requestsFulfilled = Set<AVAssetResourceLoadingRequest>(pendingRequests.compactMap {
                    self.fillInContentInformationRequest($0.contentInformationRequest)
                    if self.haveEnoughDataToFulfillRequest($0.dataRequest!) {
                        $0.finishLoading()
                        return $0
                    }
                    return nil
                })
            
                // remove fulfilled requests from pending requests
                _ = requestsFulfilled.map { self.pendingRequests.remove($0) }
    
            }
            
            /// Fills in MIME type, content length and byte-range support,
            /// either from the in-memory data or from the server response.
            func fillInContentInformationRequest(_ contentInformationRequest: AVAssetResourceLoadingContentInformationRequest?) {
                if playingFromData {
                    contentInformationRequest?.contentType = self.mimeType
                    contentInformationRequest?.contentLength = Int64(mediaData!.count)
                    contentInformationRequest?.isByteRangeAccessSupported = true
                    return
                }
                
                guard let responseUnwrapped = response else {
                    // have no response from the server yet
                    return
                }
                
                contentInformationRequest?.contentType = responseUnwrapped.mimeType
                contentInformationRequest?.contentLength = responseUnwrapped.expectedContentLength
                contentInformationRequest?.isByteRangeAccessSupported = true
                
            }
            
            /// Responds to `dataRequest` with whatever bytes are available so
            /// far; returns true once the request is fully satisfied.
            func haveEnoughDataToFulfillRequest(_ dataRequest: AVAssetResourceLoadingDataRequest) -> Bool {
                
                let requestedOffset = Int(dataRequest.requestedOffset)
                let requestedLength = dataRequest.requestedLength
                let currentOffset = Int(dataRequest.currentOffset)
                
                guard let songDataUnwrapped = mediaData,
                    songDataUnwrapped.count > currentOffset else {
                    return false
                }
                
                let bytesToRespond = min(songDataUnwrapped.count - currentOffset, requestedLength)
                let dataToRespond = songDataUnwrapped.subdata(in: Range(uncheckedBounds: (currentOffset, currentOffset + bytesToRespond)))
                dataRequest.respond(with: dataToRespond)
                
                return songDataUnwrapped.count >= requestedLength + requestedOffset
                
            }
            
            deinit {
                session?.invalidateAndCancel()
            }
            
        }
        
        fileprivate let resourceLoaderDelegate = ResourceLoaderDelegate()
        fileprivate let url: URL
        fileprivate let initialScheme: String?
        fileprivate var customFileExtension: String?
        
        
        weak var delegate: CachingPlayerItemDelegate?
        
        /// Cancels the in-flight download; the partial file stays on disk.
        func stopDownloading(){
            resourceLoaderDelegate.session?.invalidateAndCancel()
        }
        
        /// Starts downloading without waiting for the player to request data.
        open func download() {
            if resourceLoaderDelegate.session == nil {
                resourceLoaderDelegate.startDataRequest(with: url)
            }
        }
        
        // Custom scheme that routes asset loading through the delegate above.
        private let cachingPlayerItemScheme = "cachingPlayerItemScheme"
        // File name (inside Documents) that receives the downloaded bytes.
        var recordingName = "record.mp3"
        /// Is used for playing remote files.
        convenience init(url: URL, recordingName: String) {
            self.init(url: url, customFileExtension: nil, recordingName: recordingName)
        }
        
        /// Override/append custom file extension to URL path.
        /// This is required for the player to work correctly with the intended file type.
        init(url: URL, customFileExtension: String?, recordingName: String) {
            
            guard let components = URLComponents(url: url, resolvingAgainstBaseURL: false),
                let scheme = components.scheme,
                var urlWithCustomScheme = url.withScheme(cachingPlayerItemScheme) else {
                fatalError("Urls without a scheme are not supported")
            }
            self.recordingName = recordingName
            self.url = url
            self.initialScheme = scheme
            
            if let ext = customFileExtension {
                urlWithCustomScheme.deletePathExtension()
                urlWithCustomScheme.appendPathExtension(ext)
                self.customFileExtension = ext
            }
            
            let asset = AVURLAsset(url: urlWithCustomScheme)
            asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
            super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
            
            resourceLoaderDelegate.owner = self
            
            addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
            
            NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
            
        }
        
        /// Is used for playing from Data.
        init(data: Data, mimeType: String, fileExtension: String) {
            
            guard let fakeUrl = URL(string: cachingPlayerItemScheme + "://whatever/file.\(fileExtension)") else {
                fatalError("internal inconsistency")
            }
            
            self.url = fakeUrl
            self.initialScheme = nil
            
            resourceLoaderDelegate.mediaData = data
            resourceLoaderDelegate.playingFromData = true
            resourceLoaderDelegate.mimeType = mimeType
            
            let asset = AVURLAsset(url: fakeUrl)
            asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
            super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
            resourceLoaderDelegate.owner = self
            
            addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
            
            NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
            
        }
        
        // MARK: KVO
        
        /// NOTE(review): fires for every "status" change and never inspects
        /// the new value — a .failed status would also be reported as ready.
        override open func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
            delegate?.playerItemReadyToPlay?(self)
        }
        
        // MARK: Notification handlers
        
        @objc func playbackStalledHandler() {
            delegate?.playerItemPlaybackStalled?(self)
        }
    
        // MARK: -
        
        override init(asset: AVAsset, automaticallyLoadedAssetKeys: [String]?) {
            fatalError("not implemented")
        }
        
        deinit {
            NotificationCenter.default.removeObserver(self)
            removeObserver(self, forKeyPath: "status")
            resourceLoaderDelegate.session?.invalidateAndCancel()
        }
        
    }
    

    之后,在你的主 swift 文件中,你把这段代码记录下来:

    // Declare `recordingName` as an optional var: the original `let` made the
    // `?? "record.mp3"` fallback a compile error and could never be set to
    // nil when recording stops.
    var recordingName: String? = "my_rec_name.mp3"
    var playerItem: CachingPlayerItem!
    let url_stream = URL(string: "http://my_url_stream_link")
    playerItem = CachingPlayerItem(url: url_stream!, recordingName: recordingName ?? "record.mp3")
    // `let` suffices: the player reference itself is never reassigned here.
    let player1 = AVPlayer(playerItem: playerItem)
    player1.automaticallyWaitsToMinimizeStalling = false
    

    要停止记录,请使用以下代码:

    // Cancel the download session; the recorded bytes remain on disk.
    playerItem.stopDownloading()
    // NOTE(review): assigning nil requires `recordingName` to be declared as
    // `var recordingName: String?` — the setup snippet above uses `let`.
    recordingName = nil
    playerItem = nil
    

    录音将保存在您应用的目录中。

    【讨论】:

      【解决方案2】:

      我在这个问题上遇到了很多困难,所以我发布了一个答案。

      记得将这些权限说明键添加到您的 Info.plist（原文此处的截图已丢失；需要的是麦克风与语音识别的用途说明，即 NSMicrophoneUsageDescription 和 NSSpeechRecognitionUsageDescription）：

      这是我的控制器,它记录语音输入并将其返回到以前的控制器:

      import Foundation
      import UIKit
      import Speech
      
      class SpeechToTextViewController: UIViewController {

          // MARK: Outlets

          @IBOutlet weak var animationView: UIView!
          @IBOutlet weak var circleView: UIView!
          @IBOutlet weak var micImage: UIImageView!
          @IBOutlet weak var listeningLabel: UILabel!
          @IBOutlet weak var buttonStartView: UIView!
          @IBOutlet weak var cancelRecordingButton: UIButton!
          @IBOutlet weak var stopRecordingButton: UIButton!
          @IBOutlet weak var startRecordingButton: UIButton!

          // MARK: Speech state

          private let audioEngine = AVAudioEngine()
          private let speechRecognizer = SFSpeechRecognizer(locale: Locale.init(identifier:"en-US"))
          private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest!
          private var recognitionTask: SFSpeechRecognitionTask?
          private var isRecording: Bool = false

          // Receives transcription updates and the done/cancel events.
          // NOTE(review): a strong delegate reference can create a retain
          // cycle; make the protocol class-bound and this `weak` if possible.
          var delegate: SpeechToTextViewDelegate?

          override func viewDidLoad() {
              super.viewDidLoad()
              self.view.backgroundColor = UIColor(white: 1.0, alpha: 0.25)
              self.stopRecordingButton.isHidden = true
              self.listeningLabel.isHidden = true
          }

          /// Toggles between the recording and idle UI states.
          @IBAction func startStopRecording(_ sender: Any) {
              isRecording = !isRecording
              if isRecording && !audioEngine.isRunning {
                  self.cancelRecordingButton.isHidden = true
                  self.startRecordingButton.isHidden = true
                  self.stopRecordingButton.isHidden = false
                  self.listeningLabel.isHidden = false
                  UIView.animate(withDuration: 1, animations: {}) { _ in
                      UIView.animate(withDuration: 1, delay: 0.25, options: [.autoreverse, .repeat], animations: {
                          self.circleView.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
                      })
                  }
                  do {
                      try recordSpeech()
                  } catch {
                      print(error)
                  }
              } else {
                  self.listeningLabel.isHidden = true
                  stopRecording()
              }
          }

          /// Starts the audio engine, taps the microphone input and streams
          /// the buffers to the speech recognizer.
          /// - Throws: rethrows failures from `AVAudioEngine.start()`.
          func recordSpeech() throws {
              recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
              let node = audioEngine.inputNode
              let recordingFormat = node.outputFormat(forBus: 0)
              node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
                  self.recognitionRequest.append(buffer)
              }
              audioEngine.prepare()
              try audioEngine.start()
              // Check the recognizer that is actually used below; the original
              // created a second, default-locale SFSpeechRecognizer just for
              // this availability check — and left the engine and the tap
              // running when the check failed.
              guard let recognizer = speechRecognizer, recognizer.isAvailable else {
                  print("speech recognizer is not available")
                  audioEngine.stop()
                  node.removeTap(onBus: 0)
                  return
              }
              recognitionTask = recognizer.recognitionTask(with: recognitionRequest, resultHandler: { result, error in
                  var isFinal = false
                  if let result = result {
                      isFinal = result.isFinal
                      self.delegate?.appendMessage(result.bestTranscription.formattedString)
                  }
                  if error != nil || isFinal {
                      if error != nil {
                          print("error trying to capture speech to text")
                          print(error!)
                      }
                      self.stopRecording()
                  }
              })
          }

          /// Stops the engine, tears down the recognition session and
          /// dismisses the controller.
          func stopRecording() {
              if audioEngine.isRunning {
                  self.audioEngine.stop()
                  // Remove the tap installed by recordSpeech(); without this a
                  // second session crashes with "Tap already installed on bus".
                  self.audioEngine.inputNode.removeTap(onBus: 0)
                  self.recognitionRequest.endAudio()
                  // Cancel the previous task if it's running.
                  if let recognitionTask = recognitionTask {
                      recognitionTask.cancel()
                      self.recognitionTask = nil
                  }
              }
              delegate?.doneTalking()
              self.dismiss(animated: true, completion: nil)
          }

          @IBAction func cancelRecording(_ sender: Any) {
              delegate?.doneTalking()
              self.dismiss(animated: true, completion: nil)
          }

      }

      【讨论】:

      • 感谢您的代码,但这不是我要求的。我只想在线录制音频流(不是语音)并将其保存到文件中。 PS:看我的代码。
      • 我看不出这个答案与问题有什么关系。
      【解决方案3】:

      使用 AVAudioRecorder 进行录音:

      // Recorder instance, kept alive for the duration of the recording.
      private var audioRecorder: AVAudioRecorder!
      

      声明一个录音机后,你可以编写一个录音方法:

      /// Starts an AVAudioRecorder writing an AAC (.m4a) file.
      ///
      /// NOTE(review): AVAudioRecorder records from the device microphone,
      /// not from an AVPlayer stream.
      /// - Throws: `RecordingServiceError.canNotCreatePath` when no file URL
      ///   could be built.
      func startRecording() throws {
          guard let newFileURL = createURLForNewRecord() else {
              throw RecordingServiceError.canNotCreatePath
          }
          do {
              // The original assigned to an undefined `currentFileURL` here;
              // the recorder already keeps the URL, so the line was dropped.
              audioRecorder = try AVAudioRecorder(url: newFileURL,
                                                  settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
                                                             AVSampleRateKey: 8000,
                                                             AVNumberOfChannelsKey: 1,
                                                             AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
              audioRecorder.delegate = self
              audioRecorder.prepareToRecord()
              // `TimeConstants.recordDuration` was an unresolved identifier in
              // the original answer; record for an explicit 60 seconds.
              audioRecorder.record(forDuration: 60)
          } catch let error {
              print(error)
          }
      }
      

      并使用一些辅助方法和结构:

      enum RecordingServiceError: String, Error {
          case canNotCreatePath = "Can not create path for new recording"
      }

      /// Builds a unique, timestamped file URL inside the app-group folder.
      private func createURLForNewRecord() -> URL? {
          guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
              return nil
          }

          // `DateFormatter.stringFromDate(_:)` does not exist in Swift; build
          // the timestamp with a configured formatter instance instead.
          let formatter = DateFormatter()
          formatter.dateFormat = "yyyy-MM-dd_HH-mm-ss"
          let fileNamePrefix = formatter.string(from: Date())
          let fullFileName = "Record_" + fileNamePrefix + ".m4a"
          return appGroupFolderUrl.appendingPathComponent(fullFileName)
      }

      extension FileManager {
          /// Shared app-group container used to store the recordings.
          class func getAppFolderURL() -> URL? {
              // "you app bundle" is a placeholder — replace it with your real
              // App Group identifier, or this returns nil.
              return FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "you app bundle")
          }
      }
      

      【讨论】:

      • 这个方法应该开始录制你的音频流audioRecorder.record(forDuration: TimeConstants.recordDuration)
      • 当我使用“audioRecorder.record(forDuration: TimeConstants.recordDuration)”时，出现错误：使用未解析的标识符“TimeConstants”。我已相应修改了我的代码，欢迎查看。
      • 我不明白这个答案如何解决这个问题。问题是询问如何将音频流(可能来自在线 URL)保存到设备本地的文件中。
      • @JCutting8 我在 createURLForNewRecord() 方法中创建了路径并将其放在 AVAudioRecorder init 上,因此当录制完成时,您会在此文件中获得录制的音频
      猜你喜欢
      • 1970-01-01
      • 2016-03-25
      • 1970-01-01
      • 1970-01-01
      • 1970-01-01
      • 1970-01-01
      • 2018-01-17
      • 2011-06-28
      • 1970-01-01
      相关资源
      最近更新 更多