I integrated a WebSocket connection that returns raw PCM bytes, and I use the code below to play audio from the bytes received on the socket. It works fine, but when there is a delay on the socket, the audio stops and never starts again.
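For context, this is roughly how the player is fed from the socket. The snippet below is a simplified sketch of my delegate code, where player is the AudioUnitPlayer instance shown further down (the actual wiring in my app differs slightly):

func didReceive(event: WebSocketEvent, client: WebSocketClient) {
    switch event {
    case .binaryData(let data):
        // Each binary frame is a 640-byte PCM chunk (see the log below)
        player.enqueuePCMData(data)
    default:
        break
    }
}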
Format: mono, sample rate 16000 Hz, 16-bit signed PCM.
I added the log below on the socket side to record each message's byte size and arrival time:
Received binary data of size: 640 at time: 10:50:31.022
Received binary data of size: 640 at time: 10:50:31.059
Received binary data of size: 640 at time: 10:50:31.067
Received binary data of size: 640 at time: 10:50:31.086
Received binary data of size: 640 at time: 10:50:31.104
Received binary data of size: 640 at time: 10:50:31.124
Received binary data of size: 640 at time: 10:50:31.152 --- note the gap after this line
Received binary data of size: 640 at time: 10:50:31.414
Received binary data of size: 640 at time: 10:50:31.415
Received binary data of size: 640 at time: 10:50:31.415
Received binary data of size: 640 at time: 10:50:31.415
Received binary data of size: 640 at time: 10:50:31.416
Received binary data of size: 640 at time: 10:50:31.416
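For scale: at 16 kHz mono 16-bit PCM, each 640-byte chunk is only 20 ms of audio, so the gap above (10:50:31.152 to 10:50:31.414, about 262 ms) is far longer than what a few 640-byte buffers can cover:

// Worked numbers for this format (16,000 Hz, mono, 16-bit = 2 bytes per frame):
let framesPerChunk = 640 / 2                                  // 320 frames
let msPerChunk = Double(framesPerChunk) / 16_000.0 * 1_000.0  // 20 ms per chunk
let msBuffered = 3.0 * msPerChunk                             // ~60 ms across 3 buffers
// The gap in the log is ~262 ms, several times longer than the buffered audio.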
import Foundation
import AVFoundation
import Starscream
class AudioUnitPlayer {
    var audioQueue: AudioQueueRef?
    var audioFormat: AudioStreamBasicDescription
    var isPlaying = false

    init() {
        audioFormat = AudioStreamBasicDescription(
            mSampleRate: 16000,
            mFormatID: kAudioFormatLinearPCM,
            mFormatFlags: kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked,
            mBytesPerPacket: 2,   // 2 bytes per sample (16 bits)
            mFramesPerPacket: 1,  // 1 frame per packet
            mBytesPerFrame: 2,    // 2 bytes per frame (mono, 16-bit)
            mChannelsPerFrame: 1, // mono
            mBitsPerChannel: 16,  // 16 bits
            mReserved: 0
        )
    }
    func start() {
        AudioQueueNewOutput(
            &audioFormat,
            audioQueueCallback,
            Unmanaged.passUnretained(self).toOpaque(),
            nil,
            nil,
            0,
            &audioQueue
        )
        // Allocate three 640-byte buffers and prime the queue by invoking the
        // callback once per buffer (the callback enqueues them with zero bytes).
        let numBuffers = 3
        var buffers = [AudioQueueBufferRef?](repeating: nil, count: numBuffers)
        for i in 0..<numBuffers {
            AudioQueueAllocateBuffer(audioQueue!, 640, &buffers[i])
            audioQueueCallback(inUserData: Unmanaged.passUnretained(self).toOpaque(), inAQ: audioQueue!, inBuffer: buffers[i]!)
        }
        AudioQueueStart(audioQueue!, nil)
        isPlaying = true
    }
    func setToSpeakerMode() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            // Set the audio session category to .playAndRecord and route to the speaker
            try audioSession.setCategory(.playAndRecord, options: .defaultToSpeaker)
            try audioSession.setActive(true) // Activate the audio session
            print("Audio output set to speaker")
        } catch {
            print("Failed to set to speaker mode: \(error)")
        }
    }
    func setToNormalMode() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            // Set the audio session category to .playAndRecord
            try audioSession.setCategory(.playAndRecord)
            try audioSession.setActive(true) // Activate the audio session
            try audioSession.overrideOutputAudioPort(.none) // Reset to default routing
            print("Audio output set to normal (earpiece) mode")
        } catch {
            print("Failed to set to normal mode: \(error)")
        }
    }
    // This function ensures that the audio route is set according to your needs
    private func setAudioRoute() throws {
        let audioSession = AVAudioSession.sharedInstance()
        // If the device has a Bluetooth headset, route audio accordingly
        if audioSession.availableInputs?.contains(where: { $0.portType == .bluetoothHFP }) == true {
            try audioSession.overrideOutputAudioPort(.speaker) // Use the speaker if Bluetooth is connected
        } else {
            try audioSession.overrideOutputAudioPort(.none) // Default to earpiece or speaker based on the system
        }
    }
    func stop() {
        print("Stopping audio queue")
        guard isPlaying else { return }
        AudioQueueStop(audioQueue!, true)
        AudioQueueDispose(audioQueue!, true)
        isPlaying = false
    }
    // Copy as much of `data` as fits into the queue buffer; return the byte count copied.
    func loadPCMData(from data: Data, into buffer: AudioQueueBufferRef) -> Int {
        let audioData = buffer.pointee.mAudioData
        let bytesToCopy = min(data.count, Int(buffer.pointee.mAudioDataBytesCapacity))
        data.copyBytes(to: audioData.assumingMemoryBound(to: UInt8.self), count: bytesToCopy)
        return bytesToCopy
    }
    func enqueuePCMData(_ data: Data) {
        let chunkSize = 640
        var offset = 0
        while offset < data.count {
            let end = min(offset + chunkSize, data.count)
            let chunk = data.subdata(in: offset..<end) // Extract a 640-byte chunk (or the remaining bytes)
            guard let buffer = getFreeBuffer() else {
                print("No free buffer available")
                break
            }
            let dataSize = loadPCMData(from: chunk, into: buffer)
            buffer.pointee.mAudioDataByteSize = UInt32(dataSize)
            let status = AudioQueueEnqueueBuffer(audioQueue!, buffer, 0, nil)
            if status != noErr {
                print("Failed to enqueue audio buffer: \(status)")
            }
            offset += chunkSize
        }
    }
    // Note: this allocates a brand-new 640-byte buffer on every call;
    // the buffers primed in start() are never reused here.
    func getFreeBuffer() -> AudioQueueBufferRef? {
        var buffer: AudioQueueBufferRef? = nil
        let status = AudioQueueAllocateBuffer(audioQueue!, 640, &buffer)
        guard status == noErr, let validBuffer = buffer else {
            print("Failed to allocate audio buffer")
            return nil
        }
        return validBuffer
    }
}
func audioQueueCallback(
    inUserData: UnsafeMutableRawPointer?,
    inAQ: AudioQueueRef,
    inBuffer: AudioQueueBufferRef
) {
    print("Audio callback triggered")
    let player = Unmanaged<AudioUnitPlayer>.fromOpaque(inUserData!).takeUnretainedValue()
    // Re-enqueue the finished buffer with zero bytes of audio data
    inBuffer.pointee.mAudioDataByteSize = 0
    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, nil)
}
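My guess is that the queue runs dry during the network gap and then stops for good. Would keeping the queue fed with silence while waiting for data keep playback alive? A minimal sketch of that idea, assuming underrun is actually the cause (enqueueSilence is a hypothetical helper, not part of my current code):

// Hypothetical helper: enqueue 20 ms of silence so the queue does not
// stall while no socket data is available.
func enqueueSilence() {
    guard let queue = audioQueue, let buffer = getFreeBuffer() else { return }
    memset(buffer.pointee.mAudioData, 0, 640) // 640 zero bytes = 20 ms of silence
    buffer.pointee.mAudioDataByteSize = 640
    AudioQueueEnqueueBuffer(queue, buffer, 0, nil)
}

Is that the right approach, or is there a way to make AudioQueue tolerate gaps in the incoming data without feeding it silence?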