From 285c5fced0fd633d4aaa04cb7f3dd76dffc339c7 Mon Sep 17 00:00:00 2001
From: Alexander Demchuk
Date: Tue, 3 Dec 2024 12:18:11 +0100
Subject: [PATCH] Added Bluetooth mic recording (#3)

* fix: moved session restart logic to module

* refactor: streamline audio session management by removing redundant state checks and enhancing logging
---
 ios/AudioSessionManager.swift       | 76 ++++++++++++++++++++---------
 ios/ExpoPlayAudioStreamModule.swift | 70 ++++++++++++--------------
 package.json                        |  2 +-
 3 files changed, 85 insertions(+), 63 deletions(-)

diff --git a/ios/AudioSessionManager.swift b/ios/AudioSessionManager.swift
index 545720f..0d61927 100644
--- a/ios/AudioSessionManager.swift
+++ b/ios/AudioSessionManager.swift
@@ -263,23 +263,25 @@ class AudioSessionManager {
     }
 
     func pauseAudio(promise: Promise) {
+        Logger.debug("Pausing Audio")
         if let node = audioPlayerNode, self.audioEngine.isRunning, node.isPlaying {
+            Logger.debug("Pausing audio. Audio engine is running and player node is playing")
             node.pause()
             node.stop()
             self.audioEngine.stop()
             self.destroyPlayerNode()
             self.audioEngine = AVAudioEngine()
         } else {
-            print("Cannot pause: Engine is not running or node is unavailable.")
+            Logger.debug("Cannot pause: Engine is not running or node is unavailable.")
         }
         promise.resolve(nil)
     }
 
     private func restartAudioSessionForPlayback() throws {
+        Logger.debug("Restarting Audio Session")
         let audioSession = AVAudioSession.sharedInstance()
-        try audioSession.setCategory(.playback, mode: .default)
+        try audioSession.setCategory(.playback, mode: .voicePrompt)
         try audioSession.setActive(true)
-        Logger.debug("Reattaching the nodes")
 
         self.audioEngine = AVAudioEngine()
@@ -352,13 +354,10 @@ class AudioSessionManager {
         pausedDuration = 0
         isPaused = false
 
-        let session = AVAudioSession.sharedInstance()
         do {
+            let session = AVAudioSession.sharedInstance()
             Logger.debug("Debug: Configuring audio session with sample rate: \(settings.sampleRate) Hz")
 
-            // Create an audio format with the desired sample rate
-            let desiredFormat = AVAudioFormat(commonFormat: commonFormat, sampleRate: newSettings.sampleRate, channels: UInt32(newSettings.numberOfChannels), interleaved: true)
-
             // Check if the input node supports the desired format
             let inputNode = audioEngine.inputNode
             let hardwareFormat = inputNode.inputFormat(forBus: 0)
@@ -367,18 +366,17 @@ class AudioSessionManager {
                 newSettings.sampleRate = session.sampleRate
             }
 
-            try session.setCategory(.playAndRecord)
-            try session.setMode(.default)
+            try session.setCategory(.playAndRecord, mode: .videoChat, options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
             try session.setPreferredSampleRate(settings.sampleRate)
             try session.setPreferredIOBufferDuration(1024 / settings.sampleRate)
             try session.setActive(true)
-            Logger.debug("Debug: Audio session activated successfully.")
 
             let actualSampleRate = session.sampleRate
             if actualSampleRate != newSettings.sampleRate {
                 Logger.debug("Debug: Preferred sample rate not set. Falling back to hardware sample rate: \(actualSampleRate) Hz")
                 newSettings.sampleRate = actualSampleRate
             }
+            Logger.debug("Debug: Audio session is successfully configured. Actual sample rate is \(actualSampleRate) Hz")
 
             recordingSettings = newSettings  // Update the class property with the new settings
         } catch {
@@ -394,7 +392,6 @@ class AudioSessionManager {
             return StartRecordingResult(error: "Error: Failed to create audio format with the specified bit depth.")
         }
 
-
         audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: audioFormat) { [weak self] (buffer, time) in
             guard let self = self else {
                 Logger.debug("Error: File URL or self is nil during buffer processing.")
                 return
             }
@@ -441,8 +438,17 @@ class AudioSessionManager {
             return RecordingResult(fileUri: "", error: "Recording is not active")
         }
 
+        if self.audioPlayerNode != nil {
+            Logger.debug("Destroying playback")
+            self.destroyPlayerNode()
+        }
         audioEngine.stop()
         audioEngine.inputNode.removeTap(onBus: 0)
+        do {
+            try self.restartAudioSessionForPlayback()
+        } catch {
+            Logger.debug("Error restarting audio session for playback: \(error)")
+        }
         isRecording = false
 
         guard let fileURL = recordingFileURL, let startTime = startTime, let settings = recordingSettings else {
@@ -471,13 +477,6 @@ class AudioSessionManager {
             // Update the WAV header with the correct file size
             updateWavHeader(fileURL: fileURL, totalDataSize: fileSize - 44) // Subtract the header size to get audio data size
 
-            if self.audioPlayerNode != nil {
-                Logger.debug("Destroying playback")
-                self.destroyPlayerNode()
-            }
-
-            try self.restartAudioSessionForPlayback()
-
             let result = RecordingResult(
                 fileUri: fileURL.absoluteString,
                 filename: fileURL.lastPathComponent,
@@ -627,6 +626,38 @@ class AudioSessionManager {
 
     }
 
+    private func tryConvertToFormat(inputBuffer buffer: AVAudioPCMBuffer, desiredSampleRate sampleRate: Double, desiredChannel channels: AVAudioChannelCount) -> AVAudioPCMBuffer? {
+        var error: NSError? = nil
+        var commonFormat: AVAudioCommonFormat = .pcmFormatInt16
+        switch recordingSettings?.bitDepth {
+        case 16:
+            commonFormat = .pcmFormatInt16
+        case 32:
+            commonFormat = .pcmFormatInt32
+        default:
+            Logger.debug("Unsupported bit depth. Defaulting to 16-bit PCM")
+            commonFormat = .pcmFormatInt16
+        }
+        guard let nativeInputFormat = AVAudioFormat(commonFormat: commonFormat, sampleRate: buffer.format.sampleRate, channels: 1, interleaved: true) else {
+            Logger.debug("AudioSessionManager: Failed to convert to desired format. AudioFormat is corrupted.")
+            return nil
+        }
+        let desiredFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: channels, interleaved: false)!
+        let inputAudioConverter = AVAudioConverter(from: nativeInputFormat, to: desiredFormat)!
+
+        let convertedBuffer = AVAudioPCMBuffer(pcmFormat: desiredFormat, frameCapacity: 1024)!
+        let status = inputAudioConverter.convert(to: convertedBuffer, error: &error, withInputFrom: { inNumPackets, outStatus in
+            outStatus.pointee = .haveData
+            buffer.frameLength = inNumPackets
+            return buffer
+        })
+        if status == .haveData {
+            return convertedBuffer
+        }
+        return nil
+    }
+
+
     /// Updates the WAV header with the correct file size.
     /// - Parameters:
@@ -678,11 +709,12 @@ class AudioSessionManager {
         if let resampledBuffer = resampleAudioBuffer(buffer, from: buffer.format.sampleRate, to: targetSampleRate) {
             finalBuffer = resampledBuffer
         } else {
-            Logger.debug("Failed to resample audio buffer. Using manual resampling buffer.")
-            Logger.debug("Failed to resample audio buffer. Using manual resample buffer.")
-            if let manualResampleBuffer = self.manualResampleAudioBuffer(buffer, from: buffer.format.sampleRate, to: targetSampleRate) {
-                finalBuffer = manualResampleBuffer
+            Logger.debug("Fallback to AVAudioConverter. Converting from \(buffer.format.sampleRate) Hz to \(targetSampleRate) Hz")
+
+            if let convertedBuffer = self.tryConvertToFormat(inputBuffer: buffer, desiredSampleRate: targetSampleRate, desiredChannel: 1) {
+                finalBuffer = convertedBuffer
             } else {
+                Logger.debug("Failed to convert to desired format.")
                 finalBuffer = buffer
             }
         }

diff --git a/ios/ExpoPlayAudioStreamModule.swift b/ios/ExpoPlayAudioStreamModule.swift
index 3344809..540686a 100644
--- a/ios/ExpoPlayAudioStreamModule.swift
+++ b/ios/ExpoPlayAudioStreamModule.swift
@@ -34,47 +34,38 @@
         /// - `maxRecentDataDuration`: The maximum duration of recent data to keep for processing (default is 10.0 seconds).
         /// - promise: A promise to resolve with the recording settings or reject with an error.
         AsyncFunction("startRecording") { (options: [String: Any], promise: Promise) in
-            self.checkMicrophonePermission { granted in
-                guard granted else {
-                    promise.reject("PERMISSION_DENIED", "Recording permission has not been granted")
-                    return
-                }
-
-                // Extract settings from provided options, using default values if necessary
-                let sampleRate = options["sampleRate"] as? Double ?? 16000.0 // it fails if not 48000, why?
-                let numberOfChannels = options["channelConfig"] as? Int ?? 1 // Mono channel configuration
-                let bitDepth = options["audioFormat"] as? Int ?? 16 // 16bits
-                let interval = options["interval"] as? Int ?? 1000
-
-                let pointsPerSecond = options["pointsPerSecond"] as? Int ?? 20
-                let maxRecentDataDuration = options["maxRecentDataDuration"] as? Double ?? 10.0
-
-                // Create recording settings
-                let settings = RecordingSettings(
-                    sampleRate: sampleRate,
-                    desiredSampleRate: sampleRate,
-                    numberOfChannels: numberOfChannels,
-                    bitDepth: bitDepth,
-                    maxRecentDataDuration: nil,
-                    pointsPerSecond: nil
-                )
-
-                if let result = self.audioSessionManager.startRecording(settings: settings, intervalMilliseconds: interval) {
-                    if let resError = result.error {
-                        promise.reject("ERROR", resError)
-                    } else {
-                        let resultDict: [String: Any] = [
-                            "fileUri": result.fileUri ?? "",
-                            "channels": result.channels ?? 1,
-                            "bitDepth": result.bitDepth ?? 16,
-                            "sampleRate": result.sampleRate ?? 48000,
-                            "mimeType": result.mimeType ?? "",
-                        ]
-                        promise.resolve(resultDict)
-                    }
+            // Extract settings from provided options, using default values if necessary
+            let sampleRate = options["sampleRate"] as? Double ?? 16000.0 // it fails if not 48000, why?
+            let numberOfChannels = options["channelConfig"] as? Int ?? 1 // Mono channel configuration
+            let bitDepth = options["audioFormat"] as? Int ?? 16 // 16bits
+            let interval = options["interval"] as? Int ?? 1000
+
+
+            // Create recording settings
+            let settings = RecordingSettings(
+                sampleRate: sampleRate,
+                desiredSampleRate: sampleRate,
+                numberOfChannels: numberOfChannels,
+                bitDepth: bitDepth,
+                maxRecentDataDuration: nil,
+                pointsPerSecond: nil
+            )
+
+            if let result = self.audioSessionManager.startRecording(settings: settings, intervalMilliseconds: interval) {
+                if let resError = result.error {
+                    promise.reject("ERROR", resError)
                 } else {
-                    promise.reject("ERROR", "Failed to start recording.")
+                    let resultDict: [String: Any] = [
+                        "fileUri": result.fileUri ?? "",
+                        "channels": result.channels ?? 1,
+                        "bitDepth": result.bitDepth ?? 16,
+                        "sampleRate": result.sampleRate ?? 48000,
+                        "mimeType": result.mimeType ?? "",
+                    ]
+                    promise.resolve(resultDict)
                 }
+            } else {
+                promise.reject("ERROR", "Failed to start recording.")
             }
         }
@@ -194,7 +185,6 @@ public class ExpoPlayAudioStreamModule: Module, AudioStreamManagerDelegate {
             "totalSize": fileSize,
             "mimeType": manager.mimeType
         ]
-        // Emit the event to JavaScript
         sendEvent(audioDataEvent, eventBody)
     }
 
diff --git a/package.json b/package.json
index 2ecff24..cce4b4d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@mykin-ai/expo-audio-stream",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "description": "Expo Play Audio Stream module",
   "main": "build/index.js",
   "types": "build/index.d.ts",
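
Reviewer note: the Bluetooth capture path in this patch hinges on the AVAudioSession category options added in AudioSessionManager (.playAndRecord with .allowBluetooth / .allowBluetoothA2DP). Below is a minimal, standalone Swift sketch of that configuration for reference only; it is not part of the patch, and the helper name configureSessionForBluetoothRecording plus the route check are illustrative assumptions, not code from this repository.

import AVFoundation

/// Illustrative helper (not in the repo): configures the shared audio session
/// so a Bluetooth (HFP) headset microphone becomes a selectable input while
/// speaker playback stays available, mirroring the options used in the patch.
func configureSessionForBluetoothRecording() throws {
    let session = AVAudioSession.sharedInstance()
    // .allowBluetooth exposes HFP headset mics as inputs;
    // .allowBluetoothA2DP keeps high-quality A2DP output for playback.
    try session.setCategory(.playAndRecord,
                            mode: .videoChat,
                            options: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP])
    try session.setActive(true)

    // Check whether a Bluetooth input actually ended up in the current route.
    let hasBluetoothInput = session.currentRoute.inputs.contains { $0.portType == .bluetoothHFP }
    print("Bluetooth input in route: \(hasBluetoothInput)")
}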