@@ -12,6 +12,8 @@ import Foundation
// swiftlint:disable type_body_length
class CallingSDKWrapper: NSObject, CallingSDKWrapperProtocol {

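    // Plays back the mixed incoming audio delivered by the raw incoming audio stream.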
let audioPlayer = AudioPlayer()

let callingEventsHandler: CallingSDKEventsHandling

private let logger: Logger
@@ -23,6 +25,8 @@ class CallingSDKWrapper: NSObject, CallingSDKWrapperProtocol {
private var callKitRemoteInfo: CallKitRemoteInfo?
private var callingSDKInitializer: CallingSDKInitializer

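    // Raw incoming audio stream attached to the call's incoming audio options.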
var rawIncomingAudioStream: RawIncomingAudioStream?

init(logger: Logger,
callingEventsHandler: CallingSDKEventsHandling,
callConfiguration: CallConfiguration,
@@ -117,6 +121,39 @@ class CallingSDKWrapper: NSObject, CallingSDKWrapperProtocol {
throw CallCompositeInternalError.callJoinFailed
}

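        // Configure the raw incoming audio stream to deliver 16-bit PCM at 44.1 kHz, stereo.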
let audioStreamOptions = RawIncomingAudioStreamOptions()
let properties = RawIncomingAudioStreamProperties()
properties.format = .pcm16Bit
properties.sampleRate = .hz44100
properties.channelMode = .stereo
audioStreamOptions.properties = properties

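        // Route the raw stream through the call's incoming audio options so the mixed remote audio is delivered to the app.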
let incomingAudioOptions = IncomingAudioOptions()

self.rawIncomingAudioStream = RawIncomingAudioStream(options: audioStreamOptions)
incomingAudioOptions.stream = self.rawIncomingAudioStream

joinCallOptions.incomingAudioOptions = incomingAudioOptions

self.rawIncomingAudioStream?.events.onMixedAudioBufferReceived = { [weak self] args in
    // Receive raw audio buffers (AVAudioPCMBuffer) and process them using AVAudioEngine APIs.
    guard let audioBuffer = args.audioBuffer.buffer as? AVAudioPCMBuffer else {
        print("No audio data received")
        return
    }
    // Inspect the received buffer (debug logging) and hand it to the audio player.
    print("RA frameLength \(audioBuffer.frameLength)")
    print("RA frameCapacity \(audioBuffer.frameCapacity)")
    print("RA format \(audioBuffer.format)")

    self?.audioPlayer.playAudioBuffer(audioBuffer)
}

self.rawIncomingAudioStream?.events.onStateChanged = { _ in
    // Notified when the stream starts and stops.
}

do {
let callAgent = try await callingSDKInitializer.setupCallAgent()
let joinedCall = try await callAgent.join(with: joinLocator, joinCallOptions: joinCallOptions)
@@ -686,3 +723,32 @@ extension CallingSDKWrapper: DeviceManagerDelegate {
return videoStream
}
}

// Minimal playback helper for the mixed incoming audio
// (AVAudioEngine/AVAudioPlayerNode require `import AVFoundation`).
class AudioPlayer {
    private let audioEngine = AVAudioEngine()
    private let playerNode = AVAudioPlayerNode()

    init() {
        audioEngine.attach(playerNode)
        audioEngine.connect(playerNode, to: audioEngine.mainMixerNode, format: nil)
    }

    func playAudioBuffer(_ buffer: AVAudioBuffer) {
        if let pcmBuffer = buffer as? AVAudioPCMBuffer {
            playPCMBuffer(pcmBuffer)
        } else {
            print("Unable to play: buffer is not an AVAudioPCMBuffer")
        }
    }

    private func playPCMBuffer(_ buffer: AVAudioPCMBuffer) {
        // Schedule each incoming buffer for one-shot playback; the `.loops` option
        // would repeat the first buffer indefinitely and block later buffers.
        playerNode.scheduleBuffer(buffer, completionHandler: nil)
        do {
            // Start the engine only once; afterwards, scheduling buffers is enough.
            if !audioEngine.isRunning {
                try audioEngine.start()
            }
            playerNode.play()
        } catch {
            print("Error starting audio engine: \(error.localizedDescription)")
        }
    }
}
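
Note on the playback path: AVAudioPlayerNode expects scheduled buffers to match its output format, while the raw stream above is configured for 16-bit integer PCM. If the formats ever disagree, one possible approach (a minimal sketch with an illustrative helper name, not part of this change or of the Calling SDK) is to convert each incoming buffer with AVAudioConverter before scheduling it:

import AVFoundation

// Sketch: convert a buffer from the raw incoming stream into a playback format,
// e.g. playerNode.outputFormat(forBus: 0). Helper name and sizing are illustrative.
func convertForPlayback(_ buffer: AVAudioPCMBuffer,
                        to outputFormat: AVAudioFormat) -> AVAudioPCMBuffer? {
    guard let converter = AVAudioConverter(from: buffer.format, to: outputFormat) else {
        return nil
    }
    // Size the destination buffer for the same duration at the output sample rate.
    let ratio = outputFormat.sampleRate / buffer.format.sampleRate
    let capacity = AVAudioFrameCount(Double(buffer.frameLength) * ratio) + 1
    guard let converted = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: capacity) else {
        return nil
    }
    var conversionError: NSError?
    var providedInput = false
    let status = converter.convert(to: converted, error: &conversionError) { _, outStatus in
        // Feed the single source buffer once, then report that no more data is available.
        if providedInput {
            outStatus.pointee = .noDataNow
            return nil
        }
        providedInput = true
        outStatus.pointee = .haveData
        return buffer
    }
    guard status != .error, conversionError == nil else {
        print("Audio conversion failed: \(conversionError?.localizedDescription ?? "unknown error")")
        return nil
    }
    return converted
}

AudioPlayer.playPCMBuffer could then schedule the converted buffer instead of the original one.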