So I attempted to create iOS app with uninterrupted audio stream for 24hrs, however buffer is overload.
Or to many working reminiscence used greater than 2GB for floats array is 48000x60x60x24. And a lot troubles with that.
How I can create on fly audio waveforms(floats[i] values not sine waves as current) inside stream and with out delays or raptures in audio output?
func{
let durationInMillis = 86400000
let amplitude = 0.1
let semaphore = DispatchSemaphore(worth: 1)
//Run async within the background in order to not block the present thread
DispatchQueue.international().async { [self] in
//Construct the participant and its engine
let frequencyInHz = 440
let audioPlayer = AVAudioPlayerNode()
let audioEngine = AVAudioEngine()
semaphore.wait()//Declare the semphore for blocking
audioEngine.connect(audioPlayer)
let mixer = audioEngine.mainMixerNode
var sampleRateHz = Float(mixer.outputFormat(forBus: 0).sampleRate)
guard let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false) else {
return
}
audioEngine.join(audioPlayer, to: mixer, format: format)
let numberOfSamples = AVAudioFrameCount((Float(durationInMillis) / 1000 * sampleRateHz))
//create the appropriatly sized buffer
guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numberOfSamples) else {
return
}
buffer.frameLength = numberOfSamples
let channels = UnsafeBufferPointer(begin: buffer.floatChannelData, rely: Int(format.channelCount))
let floats = UnsafeMutableBufferPointer<Float>(begin: channels[0], rely: Int(numberOfSamples))
//calculate the angular frequency
let angularFrequency = Float(frequencyInHz * 2) * .pi
let bitStep = ((sampleRateHz / Float(fps)) / Float(80))
for i in 0 ..< Int(numberOfSamples) {
//creation sine wave
let waveComponent = sinf(Float(i) * angularFrequency / sampleRateHz)
floats[i] = waveComponent * amplitude
self.HMSFBPlusOne()
}
do {
strive audioEngine.begin()
}
catch{
print("Error: Audio Engine begin failure")
return
}
audioPlayer.play()
audioPlayer.scheduleBuffer(buffer, at: nil, choices: .interrupts){
DispatchQueue.essential.async {
semaphore.sign()
}
}
semaphore.wait()
semaphore.sign()
}
}
}