From 9d20d565ce19690c68d4fb3f9126606e1eb30255 Mon Sep 17 00:00:00 2001 From: hector Date: Wed, 17 Apr 2024 17:56:34 -0400 Subject: [PATCH] Use an external audio input buffer. The current implementation assumes exclusive control over the microphone. This change gives a consumer application using WebRTC the option to control the audio source, whether that is the microphone or something else --- .../webrtc/audio/JavaAudioDeviceModule.java | 29 ++++++++++++++-- .../org/webrtc/audio/WebRtcAudioRecord.java | 34 +++++++++++++++++-- 2 files changed, 58 insertions(+), 5 deletions(-) diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java index f675fd4fa..c83219a3c 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java @@ -16,6 +16,8 @@ import android.media.AudioManager; import android.os.Build; import androidx.annotation.RequiresApi; + +import java.nio.ByteBuffer; import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; @@ -52,6 +54,8 @@ public static class Builder { private boolean useLowLatency; private boolean enableVolumeLogger; private AudioRecordDataCallback audioRecordDataCallback; + private boolean useExternalAudioInputBuffer; + private ByteBuffer externalAudioInputBuffer; private Builder(Context context) { this.context = context; @@ -232,6 +236,22 @@ public Builder setAudioRecordDataCallback(AudioRecordDataCallback audioRecordDat return this; } + /** + * Controls whether the input audio should come from an external buffer. + */ + public Builder setUseExternalAudioInputBuffer(boolean useExternalAudioInputBuffer) { + this.useExternalAudioInputBuffer = useExternalAudioInputBuffer; + return this; + } + + /** + * Sets the external input audio byte buffer. 
+ */ + public Builder setExternalAudioInputBuffer(ByteBuffer externalAudioInputBuffer) { + this.externalAudioInputBuffer = externalAudioInputBuffer; + return this; + } + /** * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership * and is responsible for calling release(). @@ -265,8 +285,9 @@ public JavaAudioDeviceModule createAudioDeviceModule() { executor = WebRtcAudioRecord.newDefaultScheduler(); } final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, - audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, - samplesReadyCallback, audioRecordDataCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, + samplesReadyCallback, audioRecordDataCallback, useHardwareAcousticEchoCanceler, + useHardwareNoiseSuppressor, useExternalAudioInputBuffer, externalAudioInputBuffer); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback, useLowLatency, enableVolumeLogger); @@ -368,6 +389,10 @@ public static boolean isBuiltInNoiseSuppressorSupported() { return WebRtcAudioEffects.isNoiseSuppressorSupported(); } + public void notifyExternalDataIsRecorded(int bytesRead, long captureTimeNs) { + audioInput.notifyExternalDataIsRecorded(bytesRead, captureTimeNs); + } + private final Context context; private final AudioManager audioManager; private final WebRtcAudioRecord audioInput; diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java index 3f97d2928..445d5562e 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -108,6 +108,8 @@ class WebRtcAudioRecord { private final 
@Nullable SamplesReadyCallback audioSamplesReadyCallback; private final boolean isAcousticEchoCancelerSupported; private final boolean isNoiseSuppressorSupported; + private final boolean useExternalAudioInputBuffer; + private boolean recording = false; /** * Audio thread which keeps calling ByteBuffer.read() waiting for audio @@ -206,7 +208,7 @@ public void stopThread() { DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, null /* audioRecordCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), - WebRtcAudioEffects.isNoiseSuppressorSupported()); + WebRtcAudioEffects.isNoiseSuppressorSupported(), false, null); } public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @@ -215,7 +217,8 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, @Nullable AudioRecordDataCallback audioRecordDataCallback, - boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { + boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported, + boolean useExternalAudioInputBuffer, @Nullable ByteBuffer externalAudioInputBuffer) { if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { throw new IllegalArgumentException("HW AEC not supported"); } @@ -233,6 +236,8 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, this.audioRecordDataCallback = audioRecordDataCallback; this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; + this.useExternalAudioInputBuffer = useExternalAudioInputBuffer; + this.byteBuffer = externalAudioInputBuffer; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } @@ -293,7 +298,9 @@ private int initRecording(int sampleRate, int channels) { } final int 
bytesPerFrame = channels * getBytesPerSample(audioFormat); final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; - byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); + if (byteBuffer == null){ + byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); + } if (!(byteBuffer.hasArray())) { reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array."); return -1; @@ -305,6 +312,10 @@ private int initRecording(int sampleRate, int channels) { // the native class cache the address to the memory once. nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + if (useExternalAudioInputBuffer){ + return framesPerBuffer; + } + // Get the minimum buffer size required for the successful creation of // an AudioRecord object, in byte units. // Note that this size doesn't guarantee a smooth recording under load. @@ -385,6 +396,11 @@ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { @CalledByNative private boolean startRecording() { Logging.d(TAG, "startRecording"); + if (useExternalAudioInputBuffer){ + recording = true; + return true; + } + assertTrue(audioRecord != null); assertTrue(audioThread == null); try { @@ -409,6 +425,12 @@ private boolean startRecording() { @CalledByNative private boolean stopRecording() { Logging.d(TAG, "stopRecording"); + if (useExternalAudioInputBuffer){ + recording = false; + effects.release(); + releaseAudioResources(); + return true; + } assertTrue(audioThread != null); if (future != null) { if (!future.isDone()) { @@ -443,6 +465,12 @@ private static AudioRecord createAudioRecordOnMOrHigher( .build(); } + public void notifyExternalDataIsRecorded(int bytesRead, long captureTimeNs) { + if (useExternalAudioInputBuffer && recording){ + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); + } + } + private static AudioRecord createAudioRecordOnLowerThanM( int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) { 
Logging.d(TAG, "createAudioRecordOnLowerThanM");