From 8222fa1320b21e2fdb74207ef92fd63adaf88770 Mon Sep 17 00:00:00 2001
From: pax-k
Date: Mon, 6 May 2024 18:45:43 +0300
Subject: [PATCH] refactored, added AudioController

---
 ios/AudioController.swift       | 373 ++++++++++++++++++++++++++++++++
 ios/ExpoAudioStreamModule.swift | 290 +------------------------
 package.json                    |   2 +-
 3 files changed, 382 insertions(+), 283 deletions(-)
 create mode 100644 ios/AudioController.swift

diff --git a/ios/AudioController.swift b/ios/AudioController.swift
new file mode 100644
index 0000000..c275c37
--- /dev/null
+++ b/ios/AudioController.swift
@@ -0,0 +1,373 @@
+import AVFoundation
+import ExpoModulesCore
+
+public class AudioController {
+    var audioEngine: AVAudioEngine?
+    var audioPlayerNode: AVAudioPlayerNode?
+
+    private let audioFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 16000.0, channels: 1, interleaved: false)
+
+    // Two buffer queues for alternating playback, storing tuples of buffers and promises
+    private var bufferQueueA: [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] = []
+    private var bufferQueueB: [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] = []
+    private let bufferAccessQueue = DispatchQueue(label: "com.kinexpoaudiostream.bufferAccessQueue") // Serial queue for thread-safe buffer access
+
+    private var isPlayingQueueA: Bool = false // Indicates which queue is currently in use for playback
+
+    init() {
+        do {
+            try setupAudioComponentsAndStart()
+            setupNotifications()
+        } catch {
+            print("Failed to initialize AudioController: \(error.localizedDescription)")
+        }
+    }
+
+    deinit {
+        NotificationCenter.default.removeObserver(self)
+    }
+
+    // MARK: - Audio Setup
+
+    private func activateAudioSession() throws {
+        let audioSession = AVAudioSession.sharedInstance()
+        try audioSession.setCategory(.playback, mode: .default, options: [.mixWithOthers, .duckOthers])
+        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
+    }
+
+    private func deactivateAudioSession() throws {
+        let audioSession = AVAudioSession.sharedInstance()
+        try audioSession.setActive(false, options: .notifyOthersOnDeactivation)
+    }
+
+    private func setupAudioEngine() {
+        audioEngine = AVAudioEngine()
+    }
+
+    private func setupAudioPlayerNode() {
+        if let engine = audioEngine {
+            audioPlayerNode = AVAudioPlayerNode()
+            engine.attach(audioPlayerNode!)
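+            // Attach registers the node with the engine; connectNodes() below
+            // then wires it to the main mixer with the shared PCM format.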
+        }
+    }
+
+    private func connectNodes() {
+        if let engine = audioEngine, let playerNode = audioPlayerNode {
+            engine.connect(playerNode, to: engine.mainMixerNode, format: self.audioFormat)
+        }
+    }
+
+    private func safeStartEngine() throws {
+        if let engine = audioEngine, !engine.isRunning {
+            try engine.start()
+        } else if audioEngine == nil {
+            setupAudioEngine()
+            setupAudioPlayerNode()
+            connectNodes()
+            try audioEngine?.start()
+        }
+    }
+
+    private func stopAndInvalidate() {
+        audioEngine?.stop()
+        audioEngine = nil
+        audioPlayerNode = nil
+    }
+
+    private func setupAudioComponentsAndStart() throws {
+        do {
+            setupAudioEngine()
+            setupAudioPlayerNode()
+            connectNodes()
+            try safeStartEngine()
+        } catch {
+            print("Failed to reset and start audio components: \(error.localizedDescription)")
+            throw error // Propagate so callers marked `try` actually observe the failure
+        }
+    }
+
+    // MARK: - Notification Setup
+
+    private func setupNotifications() {
+        let audioSession = AVAudioSession.sharedInstance()
+        NotificationCenter.default.addObserver(self, selector: #selector(handleAudioSessionInterruption), name: AVAudioSession.interruptionNotification, object: audioSession)
+        NotificationCenter.default.addObserver(self, selector: #selector(handleRouteChange), name: AVAudioSession.routeChangeNotification, object: audioSession)
+        NotificationCenter.default.addObserver(self, selector: #selector(handleMediaServicesWereLost), name: AVAudioSession.mediaServicesWereLostNotification, object: audioSession)
+        NotificationCenter.default.addObserver(self, selector: #selector(handleMediaServicesWereReset), name: AVAudioSession.mediaServicesWereResetNotification, object: audioSession)
+        NotificationCenter.default.addObserver(self, selector: #selector(audioSessionDidChangeFocus), name: AVAudioSession.silenceSecondaryAudioHintNotification, object: audioSession)
+
+        NotificationCenter.default.addObserver(self, selector: #selector(appDidBecomeActive), name: UIApplication.didBecomeActiveNotification, object: nil)
+        NotificationCenter.default.addObserver(self, selector: #selector(appWillResignActive), name: UIApplication.willResignActiveNotification, object: nil)
+    }
+
+    // MARK: - Notification Handlers
+
+    @objc private func handleAudioSessionInterruption(notification: Notification) {
+        guard let info = notification.userInfo,
+              let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
+              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
+            return
+        }
+
+        if type == .began {
+            // Audio has been interrupted
+            audioPlayerNode?.pause()
+            print("Audio session was interrupted.")
+        } else if type == .ended {
+            guard let optionsValue = info[AVAudioSessionInterruptionOptionKey] as? UInt else {
+                return
+            }
+            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
+
+            if options.contains(.shouldResume) {
+                // Attempt to reactivate the session and resume playing
+                do {
+                    try activateAudioSession()
+                    audioPlayerNode?.play()
+                    print("Resuming audio after interruption.")
+                } catch {
+                    print("Failed to reactivate audio session after interruption: \(error.localizedDescription)")
+                }
+            } else {
+                print("Interruption ended without a resume option.")
+            }
+        }
+    }
+
+    @objc private func handleRouteChange(notification: Notification) {
+        guard let info = notification.userInfo,
+              let reasonValue = info[AVAudioSessionRouteChangeReasonKey] as? UInt,
+              let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
+            return
+        }
+
+        switch reason {
+        case .oldDeviceUnavailable:
+            audioPlayerNode?.pause()
+            print("Headphones were unplugged.")
+        case .newDeviceAvailable:
+            // Resume playback if appropriate
+            audioPlayerNode?.play()
+        default:
+            break
+        }
+    }
+
+    @objc private func handleMediaServicesWereLost(notification: Notification) {
+        stopAndInvalidate()
+    }
+
+    @objc private func handleMediaServicesWereReset(notification: Notification) {
+        stopAndInvalidate()
+        do {
+            try setupAudioComponentsAndStart()
+        } catch {
+            print("Failed to rebuild audio components after media services reset: \(error.localizedDescription)")
+        }
+    }
+
+    @objc func audioSessionDidChangeFocus(notification: Notification) {
+        guard let userInfo = notification.userInfo,
+              let typeValue = userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey] as? UInt,
+              let type = AVAudioSession.SilenceSecondaryAudioHintType(rawValue: typeValue) else { return }
+
+        if type == .begin {
+            // Another app is asking for audio focus
+            safePause()
+        } else {
+            // Focus returned to your app
+            safePlay()
+        }
+    }
+
+    @objc private func appDidBecomeActive(notification: Notification) {
+        // Attempt to regain control over the audio session and restart playback if needed
+        do {
+            let audioSession = AVAudioSession.sharedInstance()
+            try audioSession.setActive(true)
+            if let playerNode = audioPlayerNode, audioEngine?.isRunning != true {
+                try safeStartEngine()
+                playerNode.play() // Resume playing only if it makes sense to do so
+            }
+        } catch {
+            print("Failed to activate audio session on app activation: \(error.localizedDescription)")
+        }
+    }
+
+    // Adjust usage based on app lifecycle or specific user actions
+    @objc func appWillResignActive(notification: Notification) {
+        try? deactivateAudioSession()
+    }
+
+    // MARK: - Playback controls
+
+    private func safePause() {
+        if let node = audioPlayerNode, let engine = audioEngine, engine.isRunning {
+            node.pause()
+        } else {
+            print("Cannot pause: Engine is not running or node is unavailable.")
+        }
+    }
+
+    public func pause(promise: Promise) {
+        DispatchQueue.main.async {
+            self.safePause()
+            promise.resolve(nil)
+        }
+    }
+
+    private func safeStop() {
+        if let node = audioPlayerNode, let engine = audioEngine, engine.isRunning {
+            node.stop()
+        } else {
+            print("Cannot stop: Engine is not running or node is unavailable.")
+        }
+    }
+
+    public func stop(promise: Promise) {
+        DispatchQueue.main.async {
+            self.safeStop() // Stop the audio player node
+            do {
+                try self.deactivateAudioSession() // Deactivate the session
+                // Clear pending buffers on the serial queue that guards them
+                self.bufferAccessQueue.async {
+                    self.bufferQueueA.removeAll()
+                    self.bufferQueueB.removeAll()
+                }
+                promise.resolve(nil)
+            } catch {
+                promise.reject("PLAYBACK_STOP", "Failed to deactivate audio session: \(error.localizedDescription)")
+                print("Failed to deactivate audio session: \(error.localizedDescription)")
+            }
+        }
+    }
+
+    private func safePlay() {
+        if let node = audioPlayerNode, let engine = audioEngine, engine.isRunning {
+            node.play()
+        } else {
+            print("Cannot play: Engine is not running or node is unavailable.")
+        }
+    }
+
+    func play(promise: Promise?) {
+        DispatchQueue.main.async {
+            // Ensure that the audio engine and nodes are set up
+            if self.audioEngine == nil || self.audioPlayerNode == nil || !self.audioEngine!.isRunning {
+                do {
+                    try self.setupAudioComponentsAndStart()
+                } catch {
+                    print("Failed to set up audio components: \(error.localizedDescription)")
+                }
+            }
+
+            // Attempt to activate the audio session and play
+            do {
+                try self.activateAudioSession()
+                self.safePlay()
+                promise?.resolve(nil)
+            } catch {
+                promise?.reject("PLAYBACK_PLAY", "Failed to activate audio session or play audio: \(error.localizedDescription)")
+                print("Failed to activate audio session or play audio: \(error.localizedDescription)")
+            }
+        }
+    }
+
+    @objc func setVolume(_ volume: Float, resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
+        let clampedVolume = max(0, min(volume, 100)) / 100.0 // Normalize and clamp to [0, 1]
+
+        DispatchQueue.main.async {
+            self.audioPlayerNode?.volume = clampedVolume
+            resolver(nil)
+        }
+    }
+
+    // MARK: - Helper methods
+
+    public func removeRIFFHeaderIfNeeded(from audioData: Data) -> Data? {
+        let headerSize = 44 // The "RIFF" header is 44 bytes
+        guard audioData.count > headerSize, audioData.starts(with: "RIFF".data(using: .ascii)!) else {
+            return audioData
+        }
+        return audioData.subdata(in: headerSize..<audioData.count)
+    }
+
+    public func convertPCMDataToBuffer(_ pcmData: Data) -> AVAudioPCMBuffer? {
+        // Prepare buffer for Float32 samples
+        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: self.audioFormat!, frameCapacity: AVAudioFrameCount(pcmData.count / 2)) else {
+            print("Failed to create audio buffer.")
+            return nil
+        }
+
+        var int16Samples = [Int16](repeating: 0, count: pcmData.count / 2)
+        int16Samples.withUnsafeMutableBytes { buffer in
+            pcmData.copyBytes(to: buffer)
+        }
+
+        // Conversion to Float32
+        let floatSamples = int16Samples.map { Float($0) / 32768.0 }
+
+        pcmBuffer.frameLength = pcmBuffer.frameCapacity
+        if let channelData = pcmBuffer.floatChannelData {
+            for i in 0..<Int(pcmBuffer.frameLength) {
+                channelData[0][i] = floatSamples[i]
+            }
+        }
+
+        return pcmBuffer
+    }
+
+    // MARK: - Buffer queueing
+
+    @objc func streamRiff16Khz16BitMonoPcmChunk(_ chunk: String, resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
+        guard let audioData = Data(base64Encoded: chunk),
+              let pcmData = self.removeRIFFHeaderIfNeeded(from: audioData),
+              let pcmBuffer = self.convertPCMDataToBuffer(pcmData) else {
+            rejecter("ERR_DECODE_AUDIO", "Failed to process audio chunk", nil)
+            return
+        }
+
+        self.bufferAccessQueue.async {
+            let bufferTuple = (buffer: pcmBuffer, promise: resolver)
+            if self.isPlayingQueueA {
+                // Append to the standby queue while queue A is playing
+                self.bufferQueueB.append(bufferTuple)
+            } else {
+                self.bufferQueueA.append(bufferTuple)
+            }
+
+            // Switch queues and initiate playback when the current queue runs dry
+            if self.audioPlayerNode?.isPlaying != true || self.currentQueue().isEmpty {
+                self.switchQueuesAndPlay()
+            }
+        }
+    }
+
+    private func switchQueuesAndPlay() {
+        // Clear the queue that just finished playing
+        if self.isPlayingQueueA {
+            self.bufferQueueA.removeAll()
+        } else {
+            self.bufferQueueB.removeAll()
+        }
+
+        self.isPlayingQueueA.toggle() // Switch queues
+
+        // Schedule buffers from the new current queue, resolving each chunk's
+        // promise once its buffer finishes playing
+        for (buffer, promise) in self.currentQueue() {
+            self.audioPlayerNode?.scheduleBuffer(buffer) { [weak self] in
+                self?.onBufferCompletion(promise)
+            }
+        }
+
+        // Ensure the audio engine and player node are running
+        self.play(promise: nil)
+    }
+
+    private func onBufferCompletion(_ promise: RCTPromiseResolveBlock) {
+        // Resolve the promise when the buffer finishes playing
+        promise(nil)
+    }
+
+    private func currentQueue() -> [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] {
+        return self.isPlayingQueueA ? 
self.bufferQueueA : self.bufferQueueB + } +} diff --git a/ios/ExpoAudioStreamModule.swift b/ios/ExpoAudioStreamModule.swift index c10dcc4..558f58d 100644 --- a/ios/ExpoAudioStreamModule.swift +++ b/ios/ExpoAudioStreamModule.swift @@ -3,283 +3,13 @@ import AVFoundation import ExpoModulesCore public class ExpoAudioStreamModule: Module { - private let audioFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 16000.0, channels: 1, interleaved: false) - private var audioEngine = AVAudioEngine() - private var playerNode = AVAudioPlayerNode() - private var audioSession = AVAudioSession.sharedInstance() - - // Two buffer queues for alternating playback, storing tuples of buffers and promises - private var bufferQueueA: [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] = [] - private var bufferQueueB: [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] = [] - private let bufferAccessQueue = DispatchQueue(label: "com.kinexpoaudiostream.bufferAccessQueue") // Serial queue for thread-safe buffer access - - private var isPlayingQueueA: Bool = false // Indicates which queue is currently in use for playback - private var anotherAppPlaying: Bool = false - private var isAudioEngineSetup: Bool = false - private var isAudioSessionSetup: Bool = false - - private func setupAudioEngine() { - self.playerNode = AVAudioPlayerNode() - self.audioEngine = AVAudioEngine() - self.audioEngine.attach(self.playerNode) - self.audioEngine.connect(self.playerNode, to: self.audioEngine.mainMixerNode, format: self.audioFormat) - self.playerNode.volume = 1.0 - self.isAudioEngineSetup = true - } - - private func configureAudioSession() { - do { - self.audioSession = AVAudioSession.sharedInstance() - try self.audioSession.setCategory(.playback, mode: .voicePrompt, options: [.duckOthers, .allowBluetooth, .allowBluetoothA2DP, .allowAirPlay]) - try self.audioSession.setActive(true, options: .notifyOthersOnDeactivation) - self.isAudioSessionSetup = true - } catch { - print("Error configuring audio session: \(error)") - } - } - - func restoreAudioSession() { - // Stop or pause all audio playback and recording - self.audioEngine.stop() - self.audioEngine.disconnectNodeOutput(self.audioEngine.mainMixerNode) - - // Wait for the audio engine to fully stop before deactivating the audio session - while self.audioEngine.isRunning { - // Wait for the audio engine to stop - Thread.sleep(forTimeInterval: 0.1) - } - - // Introduce a delay before deactivating the audio session - DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { - do { - // Deactivate the audio session - try self.audioSession.setActive(false, options: .notifyOthersOnDeactivation) - self.setupAudioEngine() - self.configureAudioSession() - } catch { - print("Error deactivating or reactivating audio session: \(error.localizedDescription)") - } - } - } - - private func startEngineAndNodeIfNeeded() { - do { - if !self.audioEngine.isRunning { - self.audioEngine.prepare() - try self.audioEngine.start() - } - - if !self.playerNode.isPlaying { - self.playerNode.play() - } - } catch { - self.restoreAudioSession() - print("Error starting audio engine: \(error)") - } - } - - @objc func streamRiff16Khz16BitMonoPcmChunk(_ chunk: String, resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) { - guard let audioData = Data(base64Encoded: chunk), - let pcmData = self.removeRIFFHeaderIfNeeded(from: audioData), - let pcmBuffer = self.convertPCMDataToBuffer(pcmData) else { - rejecter("ERR_DECODE_AUDIO", "Failed to process 
audio chunk", nil)
-            return
-        }
-
-        print("pcmBuffer size: \(pcmBuffer.frameLength)")
-
-        self.bufferAccessQueue.async {
-            let bufferTuple = (buffer: pcmBuffer, promise: resolver)
-            if self.isPlayingQueueA {
-                // Directly append to bufferQueueB if isPlayingQueueA is true
-                self.bufferQueueB.append(bufferTuple)
-            } else {
-                // Otherwise, append to bufferQueueA
-                self.bufferQueueA.append(bufferTuple)
-            }
-
-            // Check if it's time to switch queues and initiate playback
-            if self.playerNode.isPlaying == false || self.currentQueue().isEmpty {
-                self.switchQueuesAndPlay()
-            }
-        }
-    }
-
-    private func switchQueuesAndPlay() {
-        if !self.isAudioEngineSetup {
-            self.setupAudioEngine()
-        }
-
-        if !self.isAudioSessionSetup {
-            self.configureAudioSession()
-        }
-
-        // Clear the queue that just finished playing
-        self.bufferAccessQueue.async {
-            if self.isPlayingQueueA {
-                self.bufferQueueA.removeAll()
-            } else {
-                self.bufferQueueB.removeAll()
-            }
-        }
-
-        self.isPlayingQueueA.toggle() // Switch queues
-
-        // Schedule buffers from the new current queue for playback
-        let currentQueue = self.currentQueue()
-        for (buffer, promise) in currentQueue {
-            self.playerNode.scheduleBuffer(buffer) { [weak self] in
-                self?.onBufferCompletion(promise)
-            }
-        }
-
-
-        // Ensure the audio engine and player node are running
-        self.startEngineAndNodeIfNeeded()
-
-
-    }
-
-    private func currentQueue() -> [(buffer: AVAudioPCMBuffer, promise: RCTPromiseResolveBlock)] {
-        return self.isPlayingQueueA ? self.bufferQueueA : self.bufferQueueB
-    }
-
-    private func onBufferCompletion(_ promise: RCTPromiseResolveBlock) {
-        //Resolve the promise when the buffer finishes playing
-        promise(nil)
-    }
-
-    private func removeRIFFHeaderIfNeeded(from audioData: Data) -> Data? {
-        let headerSize = 44 // The "RIFF" header is 44 bytes
-        guard audioData.count > headerSize, audioData.starts(with: "RIFF".data(using: .ascii)!) else {
-            return audioData
-        }
-        return audioData.subdata(in: headerSize..<audioData.count)
-    }
-
-    private func convertPCMDataToBuffer(_ pcmData: Data) -> AVAudioPCMBuffer? {
-        // Prepare buffer for Float32 samples
-        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: self.audioFormat!, frameCapacity: AVAudioFrameCount(pcmData.count / 2)) else {
-            print("Failed to create audio buffer.")
-            return nil
-        }
-
-        var int16Samples = [Int16](repeating: 0, count: pcmData.count / 2)
-        int16Samples.withUnsafeMutableBytes { buffer in
-            pcmData.copyBytes(to: buffer)
-        }
-
-        // Conversion to Float32
-        let floatSamples = int16Samples.map { Float($0) / 32768.0 }
-
-        pcmBuffer.frameLength = pcmBuffer.frameCapacity
-        if let channelData = pcmBuffer.floatChannelData {
-            for i in 0..<Int(pcmBuffer.frameLength) {
-                channelData[0][i] = floatSamples[i]
-            }
-        }
-
-        return pcmBuffer
-    }
+    private let audioContoller = AudioController()
 
     public func definition() -> ModuleDefinition {
         Name("ExpoAudioStream")
 
         AsyncFunction("streamRiff16Khz16BitMonoPcmChunk") { (base64chunk: String, promise: Promise) in
-            self.streamRiff16Khz16BitMonoPcmChunk(base64chunk, resolver: { _ in
+            audioContoller.streamRiff16Khz16BitMonoPcmChunk(base64chunk, resolver: { _ in
                 promise.resolve(nil)
             }, rejecter: { code, message, error in
                 promise.reject(code ?? "ERR_UNKNOWN", message ?? "Unknown error")
@@ -287,7 +17,7 @@ public class ExpoAudioStreamModule: Module {
         }
 
         AsyncFunction("setVolume") { (volume: Float, promise: Promise) in
-            self.setVolume(volume, resolver: { _ in
+            audioContoller.setVolume(volume, resolver: { _ in
                 promise.resolve(nil)
             }, rejecter: { code, message, error in
                 promise.reject(code ?? "ERR_VOLUME_ERROR", message ?? 
"Error setting volume") @@ -295,21 +25,17 @@ public class ExpoAudioStreamModule: Module { } AsyncFunction("pause") { promise in - self.pausePlayback(promise: promise) + audioContoller.pause(promise: promise) } - AsyncFunction("start") { promise in - self.startPlayback(promise: promise) + AsyncFunction("play") { promise in + audioContoller.play(promise: promise) } AsyncFunction("stop") { promise in - self.stopPlayback(promise: promise) + audioContoller.stop(promise: promise) } - OnCreate { - self.monitorAudioRouteChanges() - self.monitorAudioSessionNotifications() - self.updateAudioRoute() - } + OnCreate {} } } diff --git a/package.json b/package.json index cea5aa2..1f4f109 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@mykin-ai/expo-audio-stream", - "version": "0.1.13", + "version": "0.1.14", "description": "Expo Audio Stream module", "main": "build/index.js", "types": "build/index.d.ts",