diff --git a/stream-webrtc-android/api/stream-webrtc-android.api b/stream-webrtc-android/api/stream-webrtc-android.api index b5cff583..15952e1b 100644 --- a/stream-webrtc-android/api/stream-webrtc-android.api +++ b/stream-webrtc-android/api/stream-webrtc-android.api @@ -1974,6 +1974,10 @@ public abstract interface class org/webrtc/audio/AudioDeviceModule { public abstract fun setSpeakerMute (Z)V } +public abstract interface class org/webrtc/audio/AudioRecordDataCallback { + public abstract fun onAudioDataRecorded (IIILjava/nio/ByteBuffer;)V +} + public class org/webrtc/audio/JavaAudioDeviceModule : org/webrtc/audio/AudioDeviceModule { public static fun builder (Landroid/content/Context;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun getNativeAudioDeviceModulePointer ()J @@ -2033,6 +2037,7 @@ public class org/webrtc/audio/JavaAudioDeviceModule$Builder { public fun createAudioDeviceModule ()Lorg/webrtc/audio/JavaAudioDeviceModule; public fun setAudioAttributes (Landroid/media/AudioAttributes;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioFormat (I)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; + public fun setAudioRecordDataCallback (Lorg/webrtc/audio/AudioRecordDataCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioRecordErrorCallback (Lorg/webrtc/audio/JavaAudioDeviceModule$AudioRecordErrorCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioRecordStateCallback (Lorg/webrtc/audio/JavaAudioDeviceModule$AudioRecordStateCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioSource (I)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioRecordDataCallback.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioRecordDataCallback.java new file mode 100644 index 00000000..ae12f2d0 --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioRecordDataCallback.java @@ 
-0,0 +1,19 @@ +package org.webrtc.audio; + +import androidx.annotation.NonNull; + +import java.nio.ByteBuffer; + +public interface AudioRecordDataCallback { + /** + * Invoked after an audio sample is recorded. Can be used to manipulate + * the ByteBuffer before it's fed into WebRTC. Currently the audio in the + * ByteBuffer is always PCM 16bit and the buffer sample size is ~10ms. + * + * @param audioFormat the audio encoding, one of the android.media.AudioFormat ENCODING_ constants + * @param channelCount the number of recorded audio channels + * @param sampleRate the sample rate of the recorded audio, in Hz + * @param audioBuffer the recorded audio data; may be modified in place before it is fed into WebRTC + */ + void onAudioDataRecorded(int audioFormat, int channelCount, int sampleRate, @NonNull ByteBuffer audioBuffer); +} diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java index b0c48e25..670aada7 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java @@ -54,6 +54,7 @@ public static class Builder { private AudioAttributes audioAttributes; private boolean useLowLatency; private boolean enableVolumeLogger; + private AudioRecordDataCallback audioRecordDataCallback; private Builder(Context context) { this.context = context; @@ -226,6 +227,16 @@ public Builder setEnableVolumeLogger(boolean enableVolumeLogger) { return this; } + /** + * Can be used to gain access to the raw ByteBuffer from the recording device before it's + * fed into WebRTC. You can use this to manipulate the ByteBuffer (e.g. audio filters). + * Make sure that the operation is fast. + */ + public Builder setAudioRecordDataCallback(AudioRecordDataCallback audioRecordDataCallback) { + this.audioRecordDataCallback = audioRecordDataCallback; + return this; + } + /** * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership * and is responsible for calling release(). 
@@ -260,7 +271,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() { } final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, - samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + samplesReadyCallback, audioRecordDataCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback, useLowLatency, enableVolumeLogger); diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java index 6647e5fc..71acc885 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -104,6 +104,7 @@ class WebRtcAudioRecord { private final @Nullable AudioRecordErrorCallback errorCallback; private final @Nullable AudioRecordStateCallback stateCallback; + private final @Nullable AudioRecordDataCallback audioRecordDataCallback; private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; private final boolean isAcousticEchoCancelerSupported; private final boolean isNoiseSuppressorSupported; @@ -153,6 +154,13 @@ public void run() { captureTimeNs = audioTimestamp.nanoTime; } } + + // Allow the client to intercept the ByteBuffer (to modify it) + if (audioRecordDataCallback != null) { + audioRecordDataCallback.onAudioDataRecorded(audioRecord.getAudioFormat(), + audioRecord.getChannelCount(), audioRecord.getSampleRate(), byteBuffer); + } + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); } if (audioSamplesReadyCallback != null) { @@ -196,7 +204,8 @@ public void stopThread() { WebRtcAudioRecord(Context context, AudioManager audioManager) { 
this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, - null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + null /* audioSamplesReadyCallback */, null /* audioRecordDataCallback */, + WebRtcAudioEffects.isAcousticEchoCancelerSupported(), WebRtcAudioEffects.isNoiseSuppressorSupported()); } @@ -205,6 +214,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, + @Nullable AudioRecordDataCallback audioRecordDataCallback, boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { throw new IllegalArgumentException("HW AEC not supported"); @@ -219,6 +229,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, this.audioFormat = audioFormat; this.errorCallback = errorCallback; this.stateCallback = stateCallback; + this.audioRecordDataCallback = audioRecordDataCallback; this.audioSamplesReadyCallback = audioSamplesReadyCallback; this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;