
I am trying to play multiple audio files using 2 AVPlayer instances, but one of the players stops for a fraction of a second instead of all the audio files playing simultaneously. The logic of the program is as follows:

var player: AVPlayer? streams an audio file from my database. On its own it plays perfectly.

fileprivate var countPlayer: AVPlayer? plays the count number of the current item being played by var player. The count is a sequence from 1 to 8, and for each digit I am storing/sandboxing a .wav file locally, such as 1.wav, 2.wav ... 8.wav.

When the current time of var player reaches certain times, countPlayer is triggered and plays one of the local files 1.wav, 2.wav, etc.

The problem is that when countPlayer starts playing, it causes the background AVPlayer, namely var player, to stop for a fraction of a second, similar to what's described in this comment:

Play multiple Audio Files with AVPlayer

   var player: AVPlayer? //plays the song
   fileprivate var countPlayer: AVPlayer?  // plays the count number of song



   private func addBoundaryTimeObserver(tableIndexPath: IndexPath) {


    let mediaItem = mediaArray[tableIndexPath.row]

    guard let url = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else {return}
    let playerItem = AVPlayerItem(url: url)
    player = AVPlayer(playerItem: playerItem)

    var timesToTransverse = [NSValue]()

    //convert the comma-separated string of times into an array
    let timesRecorded: [String] = mediaItem.timesRecorded.components(separatedBy: ",")

    // Build boundary times from arrayOfBeats keys
    let timeDoubles: [Double] = timesRecorded.compactMap { Double($0) }

    guard !timeDoubles.isEmpty else {return} //unexpected

    timesToTransverse = timeDoubles.map { second in
        let cmtime = CMTime(seconds: second, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        return NSValue(time: cmtime)
    }

    guard let playerCell = tableView.cellForRow(at: IndexPath(row: 0, section: 0)) as? PlayerCell else {return}

    startTime = Date().timeIntervalSinceReferenceDate

    timeIndex = 0
    player?.play()
    player?.rate = Float(initialPlaybackRate)



    // find the index of time
    //reset timeObserverToken
    // call a function with the new times sorted


    // Queue on which to invoke the callback
    let mainQueue = DispatchQueue.main
    // Add time observer
    timeObserverToken =
        player?.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: mainQueue) {
            [weak self] in


            // Because there are no time signature changes, we can simply increment timeIndex
            // by 1 every time the `addBoundaryTimeObserver` completion handler is called and
            // subscript timesToTransverse with timeIndex to get the subsequent timeInSeconds.
            guard let strongSelf = self, strongSelf.timeIndex < timesToTransverse.count else {return}

            let timeElement = timesToTransverse[strongSelf.timeIndex]
            strongSelf.timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)

            //show progress in progressView
            let duration = CMTimeGetSeconds(playerItem.duration)
            let cmtimeSeconds = CMTime(seconds: strongSelf.timeInSeconds, preferredTimescale: CMTimeScale(NSEC_PER_SEC))

            //Total time since timer started, in seconds
            strongSelf.timeInSeconds = Date().timeIntervalSinceReferenceDate - strongSelf.startTime

            let timeString = String(format: "%.2f", strongSelf.timeInSeconds)
            strongSelf.timeString = timeString


            //use the remainder operator to determine the beat count
            let beat = (strongSelf.timeIndex + 1) % 8 == 0 ? 8 : ((strongSelf.timeIndex + 1) % 8)


            //play the beat count: 1, 2, ...8
            strongSelf.prepareToPlayAudio(beatCount: beat)

            /*
             0: (0 + 1) % 8 = 1
             1: (1 + 1) % 8 = 2
             6: (6 + 1) % 8 = 7
             7: (7 + 1) % 8 = 0
             */

            strongSelf.timeIndex += 1
    }
}//end addBoundaryTimeObserver


    // determine which .wav file to play and play it
    private func prepareToPlayAudio(beatCount: Int) {

        switch beatCount {
        case 1:
            guard let url = Bundle.main.url(forResource: "1", withExtension: "wav") else {return}
            playWith(beatCountURL: url)

        //7 more cases go here .....
        default: print("unexpected case here")
        }
    }//end prepareToPlayAudio(beatCount:)



    private func playWith(beatCountURL: URL) {
        let playerItem = AVPlayerItem(url: beatCountURL)
        countPlayer = AVPlayer(playerItem: playerItem)
        countPlayer?.play()
    }
How can I play the count audio without causing the background player to stop?

1 Answer


You would be better off using AVAudioPlayerNode, AVAudioMixerNode, and AVAudioEngine. Using these classes you won't have the problem you're having right now, and they're also not that difficult to set up.

You can check out my gist; in order to play the sounds in your Playground you need to put the audio files into the Resources folder in the Project Navigator: https://gist.github.com/standinga/24342d23acfe70dc08cbcc994895f32b

The code works without stopping the background audio when the top sounds are triggered.

Here's also the same code:

import AVFoundation
import PlaygroundSupport

PlaygroundPage.current.needsIndefiniteExecution = true

class AudioPlayer {
    var backgroundAudioFile: AVAudioFile
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []

    init(_ url: URL, urls: [URL] = []) {
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }

        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()

        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        self.urls = urls
        backgroundAudioNode = AVAudioPlayerNode()
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
    }

    func start() {
        engine.attach(backgroundAudioNode)
        engine.connect(backgroundAudioNode, to: mixer, format: nil)
        backgroundAudioNode.scheduleFile(backgroundAudioFile, at: nil, completionHandler: nil)

        try! engine.start()
        backgroundAudioNode.play()

        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
            try! engine.start()
        }

        // simulate rescheduling files played on top of background audio
        DispatchQueue.global().async { [unowned self] in
            for i in 0..<1000 {
                sleep(2)
                let index = i % self.topAudioAudioNodes.count
                let node = self.topAudioAudioNodes[index]
                node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
                node.play()
            }
        }
    }
}

let bundle = Bundle.main
let beepLow = bundle.url(forResource: "beeplow", withExtension: "wav")!
let beepMid = bundle.url(forResource: "beepmid", withExtension: "wav")!
let backgroundAudio = bundle.url(forResource: "backgroundAudio", withExtension: "wav")!
let audioPlayer = AudioPlayer(backgroundAudio, urls: [beepLow, beepMid])
audioPlayer.start()
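
If you want the top sounds to follow your song's boundary times rather than the fixed two-second loop above, one option is to expose a small method on AudioPlayer that schedules a given top file on demand, and call it from your addBoundaryTimeObserver callback instead of creating a new AVPlayer for every beat. Here is a minimal sketch, assuming the eight count files were passed in via urls and that start() has already been called so the nodes are attached to the engine; playTopSound(at:) is a name I'm introducing, not part of the gist:

extension AudioPlayer {
    // Schedule one of the preloaded top files (e.g. the beat count) on its own
    // player node; the background node keeps playing untouched.
    func playTopSound(at index: Int) {
        guard topAudioFiles.indices.contains(index) else { return }
        let node = topAudioAudioNodes[index]
        node.scheduleFile(topAudioFiles[index], at: nil, completionHandler: nil)
        node.play()
    }
}

In the boundary-time observer you would then call something like audioPlayer.playTopSound(at: beat - 1) in place of prepareToPlayAudio(beatCount:)/playWith(beatCountURL:), so the count sounds are mixed into the same engine instead of competing with the background AVPlayer.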