My speaker class would receives real-time streaming audio chunk from our backend (PCM, 16bit int, 48k pattern price, 1 channel, 20ms period) and the consumer is meant to play the audios.
Subsequently, I created this class Speaker that makes use of AVAudioEngine to play.
import AVFoundation
class Speaker {
// Attributes
non-public var audioQueue: [AVAudioPCMBuffer]
non-public let audioEngine: AVAudioEngine
non-public let audioPlayerNode: AVAudioPlayerNode
non-public var isPlaying: Bool
// format from backend
non-public let srcFormat: AVAudioFormat
// format for speaker
non-public let tgtFormat: AVAudioFormat
non-public let processQueue = DispatchQueue(label: "processQueue")
non-public let playQueue = DispatchQueue(label: "playQueue")
// Initializer
init(sampleRate: Double = 48000.0) throws {
self.audioQueue = []
self.audioEngine = AVAudioEngine()
self.audioPlayerNode = AVAudioPlayerNode()
self.isPlaying = false
self.srcFormat = AVAudioFormat(
commonFormat: .pcmFormatInt16,
sampleRate: sampleRate,
channels: 1,
interleaved: false
)!
self.tgtFormat = AVAudioFormat(
// ios solely helps enter format of .pcmFormatFloat32
commonFormat: .pcmFormatFloat32,
sampleRate: sampleRate,
channels: 1,
interleaved: false
)!
do {
strive configureAudioEngine()
} catch {
// report error to js
// potentiallly report back to sentry and file the gadget kind
}
NSLog("configuartion is completed")
NotificationCenter.default.addObserver(
self,
selector: #selector(handleRouteChange(_:)),
identify: AVAudioSession.routeChangeNotification,
object: nil
)
}
non-public func configureAudioEngine() throws {
NSLog("create pipeline")
self.audioEngine.connect(self.audioPlayerNode)
self.audioEngine.join(
self.audioPlayerNode,
to: self.audioEngine.mainMixerNode,
format: tgtFormat
)
NSLog("pipeline created")
NSLog("beginning engine")
strive self.audioEngine.begin()
NSLog("engine began")
}
deinit {
NotificationCenter.default.removeObserver(
self, identify: AVAudioSession.routeChangeNotification, object: nil
)
}
func play() {
guard !isPlaying else { return }
isPlaying = true
audioPlayerNode.play()
playNextBuffer()
}
func push(audioChunk: Knowledge) {
processQueue.async {
guard let buffer = self.createPCMBuffer(from: audioChunk) else {
NSLog("didn't create buffer from chunk")
return
}
NSLog("pushing chunk to queue, measurement: (buffer.frameLength)")
self.audioQueue.append(buffer)
NSLog("pushed one chunk to queue")
if self.isPlaying {
self.playNextBuffer()
}
}
}
func cease() {
isPlaying = false
audioPlayerNode.cease()
audioEngine.cease()
audioQueue.removeAll()
self.audioQueue = [] // clear the queue
}
non-public func playNextBuffer() {
playQueue.async {
guard self.isPlaying, self.audioPlayerNode.isPlaying, let buffer = self.audioQueue.first else {
NSLog("no buffer to play or the participant will not be taking part in, exiting...")
return
}
self.audioQueue.removeFirst()
NSLog("schedule to play buffer, measurement: (buffer.frameLength)")
self.audioPlayerNode.scheduleBuffer(buffer, completionHandler: self.playNextBuffer)
}
}
@objc non-public func handleRouteChange(_ notification: Notification) {
guard let userInfo = notification.userInfo,
let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
let motive = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
return
}
change motive {
case .newDeviceAvailable:
NSLog("New gadget out there")
if self.isPlaying {
self.audioPlayerNode.pause()
if !self.audioEngine.isRunning {
self.audioEngine.begin()
}
if !self.audioPlayerNode.isPlaying {
self.audioPlayerNode.play()
}
case .oldDeviceUnavailable:
NSLog("Outdated gadget unavailable")
if let previousRoute = userInfo[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription {
for output in previousRoute.outputs {
if output.portType == .bluetoothA2DP || output.portType == .bluetoothLE || output.portType == .bluetoothHFP {
NSLog("Bluetooth gadget disconnected")
if isPlaying {
audioPlayerNode.pause()
audioPlayerNode.play()
}
}
}
}
default:
break
}
}
func createPCMBuffer(from information: Knowledge) -> AVAudioPCMBuffer? {
// Create the supply buffer
let int16Array = information.withUnsafeBytes {
Array(UnsafeBufferPointer<Int16>(
begin: $0.bindMemory(to: Int16.self).baseAddress!,
depend: information.depend / MemoryLayout<Int16>.measurement
)
)
}
// Create an AVAudioPCMBuffer from the Int16 array
let frameCapacity = UInt32(int16Array.depend)
guard let srcBuffer = AVAudioPCMBuffer(
pcmFormat: srcFormat, frameCapacity: frameCapacity
) else {
NSLog("srcBuffer is nil")
return nil
}
srcBuffer.frameLength = frameCapacity
// Copy information into the supply buffer
int16Array.withUnsafeBufferPointer { int16Pointer in
memcpy(srcBuffer.int16ChannelData![0], int16Pointer.baseAddress!, Int(frameCapacity) * MemoryLayout<Int16>.measurement)
}
// Create the goal buffer
guard let tgtBuffer = AVAudioPCMBuffer(
pcmFormat: tgtFormat, frameCapacity: frameCapacity
) else {
NSLog("destinationBuffer is nil")
return nil
}
// Create the AVAudioConverter
let converter = AVAudioConverter(from: srcFormat, to: tgtFormat)
do {
// Carry out the conversion
strive converter?.convert(to: tgtBuffer, from: srcBuffer)
} catch {
NSLog("Conversion error: (error)")
return nil
}
return tgtBuffer
}
}
Based on the official doc, when plug in a brand new exterior audio gadget like airpods should not pause the audio however as a substitute, direct the audio to that gadget.
Nonetheless, in my case, it could cease to play the audio proper after it connects to an airpods.
Word,
- earlier than, the code did not do something when new gadget is on the market within the notification handler, however later I examined and discovered the AudioEngine was turned off. Subsequently I add the code there to restart the AudioEngine and Participant Node.
- Our backend makes use of websocket to speak and due to this fact I did not use the webRTC framework within the first place.
The App is meant to maintain taking part in audio when it hook up with a bluetooth audio gadget.
I attempted so as to add a reset operate when new gadget is linked, however that does not assist.