diff --git a/android/build.gradle b/android/build.gradle index 1424a136..e3fcba7e 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -66,7 +66,7 @@ dependencies { implementation "androidx.appcompat:appcompat:1.1.0" implementation "androidx.annotation:annotation:1.1.0" - implementation "io.dolby:sdk:[3.10.1,3.11)" + implementation "io.dolby:sdk:[3.11.0, 3.12)" testImplementation("junit:junit:${JUNIT_VERSION}") testImplementation("org.powermock:powermock-api-mockito2:${POWERMOCK_VERSION}") diff --git a/android/gradle.properties b/android/gradle.properties index 65d198ed..379e593f 100644 --- a/android/gradle.properties +++ b/android/gradle.properties @@ -5,5 +5,5 @@ MOCKITO_CORE_VERSION=2.26.0 POWERMOCK_VERSION=2.0.2 ROBOLECTRIC_VERSION=4.4 JUNIT_VERSION=4.13.2 -COMMS_SDK_VERSION="3.10.1" +COMMS_SDK_VERSION="3.11.0-beta.1" COMPONENT_NAME="react-native-sdk" \ No newline at end of file diff --git a/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ConferenceListenOptionsMapper.kt b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ConferenceListenOptionsMapper.kt index 7631965f..776f9673 100644 --- a/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ConferenceListenOptionsMapper.kt +++ b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ConferenceListenOptionsMapper.kt @@ -30,6 +30,7 @@ class ConferenceListenOptionsMapper { it.getOptionalInt(CONFERENCE_LISTEN_OPTIONS_MAX_VIDEO_FORWARDING) ?.let(::setMaxVideoForwarding) it.getOptionalBoolean(CONFERENCE_LISTEN_OPTIONS_SPATIAL_AUDIO)?.let(::setSpatialAudio) + it.getString(CONFERENCE_LISTEN_OPTIONS_LISTEN_TYPE)?.let(ListenTypeMapper::convertToModel)?.let(::setListenType) } } .build() @@ -45,5 +46,6 @@ class ConferenceListenOptionsMapper { private const val CONFERENCE_LISTEN_OPTIONS_ACCESS_TOKEN = "conferenceAccessToken" private const val CONFERENCE_LISTEN_OPTIONS_MAX_VIDEO_FORWARDING = "maxVideoForwarding" private const val CONFERENCE_LISTEN_OPTIONS_VIDEO_FORWARDING_STRATEGY = "videoForwardingStrategy" + private const val CONFERENCE_LISTEN_OPTIONS_LISTEN_TYPE = "listenType" } } diff --git a/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ListenTypeMapper.kt b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ListenTypeMapper.kt new file mode 100644 index 00000000..281df2fa --- /dev/null +++ b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ListenTypeMapper.kt @@ -0,0 +1,20 @@ +package io.dolby.sdk.comms.reactnative.mapper + +import com.voxeet.sdk.models.ListenType +import java.security.InvalidParameterException + +object ListenTypeMapper { + fun convertToRN(listenType: ListenType) = when(listenType) { + ListenType.REGULAR -> "REGULAR" + ListenType.MIXED -> "MIXED" + } + + fun convertToModel(listenRNType: String) = when(listenRNType) { + REGULAR -> ListenType.REGULAR + MIXED -> ListenType.MIXED + else -> throw InvalidParameterException("Invalid value for listen type") + } + + private const val REGULAR = "REGULAR" + private const val MIXED = "MIXED" +} diff --git a/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ParticipantMapper.kt b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ParticipantMapper.kt index 0cf1cc7f..cd9de12f 100644 --- a/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ParticipantMapper.kt +++ b/android/src/main/java/io/dolby/sdk/comms/reactnative/mapper/ParticipantMapper.kt @@ -76,6 +76,7 @@ class ParticipantMapper { private fun toRNParticipantType(participantType: ParticipantType) = when (participantType) { 
ParticipantType.USER -> "USER" ParticipantType.LISTENER -> "LISTENER" + ParticipantType.MIXER_MIX -> "MIXER_MIX" ParticipantType.SPEAKER, ParticipantType.PSTN, ParticipantType.MIXER, diff --git a/android/src/main/java/io/dolby/sdk/comms/reactnative/services/RNSessionServiceModule.kt b/android/src/main/java/io/dolby/sdk/comms/reactnative/services/RNSessionServiceModule.kt index 7fcf430e..d300b944 100644 --- a/android/src/main/java/io/dolby/sdk/comms/reactnative/services/RNSessionServiceModule.kt +++ b/android/src/main/java/io/dolby/sdk/comms/reactnative/services/RNSessionServiceModule.kt @@ -59,7 +59,7 @@ class RNSessionServiceModule( sessionService .close() .rejectIfFalse { "Close session operation failed" } - .forward(promise) + .forward(promise, ignoreReturnType = true) } /** diff --git a/android/src/main/java/io/dolby/sdk/comms/reactnative/services/audio/RNRemoteAudioModule.kt b/android/src/main/java/io/dolby/sdk/comms/reactnative/services/audio/RNRemoteAudioModule.kt index 36674187..90aad632 100644 --- a/android/src/main/java/io/dolby/sdk/comms/reactnative/services/audio/RNRemoteAudioModule.kt +++ b/android/src/main/java/io/dolby/sdk/comms/reactnative/services/audio/RNRemoteAudioModule.kt @@ -1,5 +1,4 @@ package io.dolby.sdk.comms.reactnative.services.audio - import com.facebook.react.bridge.ReactApplicationContext import com.facebook.react.bridge.ReactContextBaseJavaModule import com.facebook.react.bridge.ReactMethod @@ -71,6 +70,34 @@ class RNRemoteAudioModule( .forward(promise) } + /** + * Sets the volume of a selected participant in non-Dolby Voice conferences to a preferred value between 0 and 1. + * Providing an unsupported number results in constraining the volume to either 0 or 1. Using the method for a selected participant + * after calling setOutputVolume overwrites the participant's volume. This method is supported in SDK 3.11 and later. + * + * @param participantRN The selected remote participant. + * @param volume The preferred volume level between 0 (no audio) and 1 (full volume). + */ + @ReactMethod + fun setParticipantVolume(participantRN: ReadableMap, volume: Float, promise: ReactPromise) { + Promises.promise(audioService.remote.setVolume(toParticipant(participantRN), volume)) + .forward(promise, ignoreReturnType = true) + } + + /** + * Sets the conference volume for the local participant. + * The method sets the volume of all remote participants to a preferred value between 0 and 1. + * Providing an unsupported volume results in constraining volume to a either 0 or 1. + * This method is supported in SDK 3.11 and later. + * + * @param volume + */ + @ReactMethod + fun setAllParticipantsVolume(volume: Float, promise: ReactPromise) { + Promises.promise(audioService.remote.setOutputVolume(volume)) + .forward(promise, ignoreReturnType = true) + } + /** * Gets [Participant] based on a React Native participant model. Throws * [IllegalArgumentException] if participant id is invalid. diff --git a/docs/classes/internal.CommandService.md b/docs/classes/internal.CommandService.md index e1f4e252..f494f7c7 100644 --- a/docs/classes/internal.CommandService.md +++ b/docs/classes/internal.CommandService.md @@ -27,7 +27,7 @@ The CommandService allows the application to send and receive text messages and ▸ **send**(`message`): `Promise`<`void`\> -Sends a message to all conference participants. +Sends a message to all conference participants. This method is not available for [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners. 
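A minimal TypeScript sketch of how an application might call this method; it assumes the CommandService is exposed as `CommsAPI.command` on the SDK's default export, which this changeset itself does not show:

```typescript
import CommsAPI from '@dolbyio/comms-sdk-react-native';

// Hypothetical helper: broadcast a JSON payload to the other participants.
// Mixed listeners (ListenType.MIXED) do not receive the message.
async function broadcastMessage(payload: Record<string, unknown>): Promise<void> {
  try {
    await CommsAPI.command.send(JSON.stringify(payload));
  } catch (e) {
    console.warn('Sending the message failed:', e);
  }
}
```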
#### Parameters diff --git a/docs/classes/internal.ConferenceService.md b/docs/classes/internal.ConferenceService.md index 0524c620..37d99657 100644 --- a/docs/classes/internal.ConferenceService.md +++ b/docs/classes/internal.ConferenceService.md @@ -29,18 +29,12 @@ The ConferenceService allows an application to manage the conference life-cycle - [leave](internal.ConferenceService.md#leave) - [mute](internal.ConferenceService.md#mute) - [replay](internal.ConferenceService.md#replay) -- [setAudioProcessing](internal.ConferenceService.md#setaudioprocessing) -- [setMaxVideoForwarding](internal.ConferenceService.md#setmaxvideoforwarding) - [videoForwarding](internal.ConferenceService.md#videoforwarding) -- [startAudio](internal.ConferenceService.md#startaudio) - [startScreenShare](internal.ConferenceService.md#startscreenshare) -- [startVideo](internal.ConferenceService.md#startvideo) -- [stopAudio](internal.ConferenceService.md#stopaudio) - [stopScreenShare](internal.ConferenceService.md#stopscreenshare) - [setSpatialDirection](internal.ConferenceService.md#setspatialdirection) - [setSpatialEnvironment](internal.ConferenceService.md#setspatialenvironment) - [setSpatialPosition](internal.ConferenceService.md#setspatialposition) -- [stopVideo](internal.ConferenceService.md#stopvideo) - [updatePermissions](internal.ConferenceService.md#updatepermissions) - [onStatusChange](internal.ConferenceService.md#onstatuschange) - [onPermissionsChange](internal.ConferenceService.md#onpermissionschange) @@ -271,7 +265,7 @@ ___ ▸ **kick**(`participant`): `Promise`<`void`\> -Kicks a participant out of the current conference. This actions requires you to be conference owner or to have the adequate permissions to kick a participant. +Kicks a participant out of the current conference. This actions requires you to be conference owner or to have the adequate permissions to kick a participant. This method is not available for [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners. #### Parameters @@ -347,47 +341,6 @@ Replays a previously recorded conference. For more information, see the [Recordi ___ -### setAudioProcessing - -▸ **setAudioProcessing**(`options?`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the [setCaptureMode](doc:rn-client-sdk-references-localaudio#setcapturemode) method. - -Enables and disables audio processing for a conference participant. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `options` | [`AudioProcessingOptions`](../interfaces/internal.AudioProcessingOptions.md) | The AudioProcessingOptions model includes the AudioProcessingSenderOptions model responsible for enabling and disabling audio processing. | - -#### Returns - -`Promise`<`void`\> - -___ - -### setMaxVideoForwarding - -▸ **setMaxVideoForwarding**(`max?`, `prioritizedParticipants?`): `Promise`<`any`\> - -Sets the maximum number of video streams that may be transmitted to the local participant. - -This method is deprecated in SDK 3.6. - -#### Parameters - -| Name | Type | Default value | Description | -| :------ | :------ | :------ | :------ | -| `max` | `number` | `4` | The maximum number of video streams that may be transmitted to the local participant. The valid parameter values are between 0 and 4. By default, the parameter is set to 4. | -| `prioritizedParticipants` | [`Participant`](../interfaces/internal.Participant.md)[] | `[]` | The list of the prioritized participants. 
This parameter allows using a pin option to prioritize specific participant's video streams and display their videos even when these participants do not talk. | - -#### Returns - -`Promise`<`any`\> - -___ - ### videoForwarding ▸ **videoForwarding**(`options`): `Promise`<`any`\> @@ -398,7 +351,7 @@ Sets the video forwarding functionality for the local participant. The method al - Prioritizing specific participants' video streams that need to be transmitted to the local participant - Changing the [video forwarding strategy](doc:rn-client-sdk-enums-videoforwardingstrategy) that defines how the SDK should select conference participants whose videos will be received by the local participant -This method is available only in SDK 3.6 and later. +This method is available only in SDK 3.6 and later and is not available for [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners. #### Parameters @@ -412,33 +365,12 @@ This method is available only in SDK 3.6 and later. ___ -### startAudio - -▸ **startAudio**(`participant`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the **start** methods that are available in the [LocalAudio](doc:rn-client-sdk-references-localaudio) and [RemoteAudio](doc:rn-client-sdk-references-remoteaudio) models. - -Starts audio transmission between the local client and a conference. The startAudio method impacts only the audio streams that the local participant sends and receives; the method does not impact the audio transmission between remote participants and a conference and does not allow the local participant to force sending remote participants’ streams to the conference or to the local participant. Depending on the specified participant in the `participant` parameter, the startAudio method starts the proper audio transmission: -- When the specified participant is the local participant, startAudio ensures sending local participant’s audio from the local client to the conference. -- When the specified participant is a remote participant, startAudio ensures sending remote participant’s audio from the conference to the local client. This allows the local participant to unmute remote participants who are locally muted through the [stopAudio](#stopaudio) method. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `participant` | [`Participant`](../interfaces/internal.Participant.md) | The selected participant. If you wish to transmit the local participant's audio stream to the conference, provide the local participant's object. If you wish to receive the specific remote participants' audio streams, provide these remote participants' objects. | - -#### Returns - -`Promise`<`void`\> - -___ - ### startScreenShare ▸ **startScreenShare**(): `Promise`<`void`\> -Starts a screen sharing session. +Starts a screen sharing session. The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. + The ScreenShare with iOS document (https://docs.dolby.io/communications-apis/docs/screenshare-with-ios) describes how to set up screen-share outside the application. Instead of setting the following properties: - CommsSDK.shared.appGroup = "YOUR_APP_GROUP" @@ -447,47 +379,7 @@ Instead of setting the following properties: - Add a new `DolbyioSdkAppGroupKey` as a string type and enter the group name ("YOUR_APP_GROUP"). 
- Add a new `DolbyioSdkPreferredExtensionKey` as a string type and enter the broadcast extension bundle ID ("YOUR_BROADCAST_EXTENSION_BUNDLE_ID"). -#### Returns - -`Promise`<`void`\> - -___ - -### startVideo - -▸ **startVideo**(`participant`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the **start** methods that are available in the [LocalVideo](doc:rn-client-sdk-references-localvideo) and [RemoteVideo](doc:rn-client-sdk-references-remotevideo) models. - -Notifies the server to either start sending the local participant's video stream to the conference or start sending a remote participant's video stream to the local participant. The startVideo method does not control the remote participant's video stream; if a remote participant does not transmit any video stream, the local participant cannot change it using the startVideo method. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `participant` | [`Participant`](../interfaces/internal.Participant.md) | The participant who will receive the video stream, either remote or local. | - -#### Returns - -`Promise`<`void`\> - -___ - -### stopAudio - -▸ **stopAudio**(`participant`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the **stop** methods that are available in the [LocalAudio](doc:rn-client-sdk-references-localaudio) and [RemoteAudio](doc:rn-client-sdk-references-remoteaudio) models. - -Stops audio transmission between the local client and a conference. The stopAudio method impacts only the audio streams that the local participant sends and receives; the method does not impact the audio transmission between remote participants and a conference and does not allow the local participant to stop sending remote participants’ streams to the conference. Depending on the specified participant in the `participant` parameter, the stopAudio method stops the proper audio transmission: -- When the specified participant is the local participant, stopAudio stops sending local participant’s audio from the local client to the conference. -- When the specified participant is a remote participant, stopAudio stops sending remote participant’s audio from the conference to the local client. This allows the local participant to locally mute remote participants. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `participant` | [`Participant`](../interfaces/internal.Participant.md) | The selected participant. If you wish to not transmit the local participant's audio stream to the conference, provide the local participant's object. If you wish to not receive the specific remote participants' audio streams, provide these remote participants' objects. | +The SDK 3.10 and earlier support sharing only one screen per conference. The SDK 3.11 and later allow sharing two screens in one conference, so two participants can share their screens at the same time. #### Returns @@ -499,7 +391,7 @@ ___ ▸ **stopScreenShare**(): `Promise`<`void`\> -Stops a screen sharing session. +Stops a screen sharing session. The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. 
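As a rough illustration of the two methods above, a participant who joined with `join()` might toggle screen sharing as follows; this sketch assumes the ConferenceService is exposed as `CommsAPI.conference`, as it is in the example app later in this diff:

```typescript
import CommsAPI from '@dolbyio/comms-sdk-react-native';

// Toggle screen sharing for the local participant. Only participants who
// joined with join() may call these methods; listeners cannot share a screen.
async function toggleScreenShare(isSharing: boolean): Promise<boolean> {
  if (isSharing) {
    await CommsAPI.conference.stopScreenShare();
  } else {
    await CommsAPI.conference.startScreenShare();
  }
  return !isSharing;
}
```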
#### Returns @@ -633,26 +525,6 @@ For example, if a local participant Eric, who does not have a set direction, cal ___ -### stopVideo - -▸ **stopVideo**(`participant`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the **stop** methods that are available in the [LocalVideo](doc:rn-client-sdk-references-localvideo) and [RemoteVideo](doc:rn-client-sdk-references-remotevideo) models. - -Notifies the server to either stop sending the local participant's video stream to the conference or stop sending a remote participant's video stream to the local participant. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `participant` | [`Participant`](../interfaces/internal.Participant.md) | The participant who wants to stop receiving a video stream. | - -#### Returns - -`Promise`<`void`\> - -___ - ### updatePermissions ▸ **updatePermissions**(`participantPermissions`): `Promise`<`void`\> diff --git a/docs/classes/internal.FilePresentationService.md b/docs/classes/internal.FilePresentationService.md index eedf6183..34a32ecf 100644 --- a/docs/classes/internal.FilePresentationService.md +++ b/docs/classes/internal.FilePresentationService.md @@ -24,6 +24,8 @@ The FilePresentationService allows presenting files during a conference. The Dol **10.** The presenter and the viewers receive [information](doc:rn-client-sdk-models-filepresentation) about the end of the file presentation via the [onFilePresentationChange](#onfilepresentationchange) listener. +The service is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. + ## Table of contents ### Constructors diff --git a/docs/classes/internal.LocalAudio.md b/docs/classes/internal.LocalAudio.md index 65f93ae6..462fec28 100644 --- a/docs/classes/internal.LocalAudio.md +++ b/docs/classes/internal.LocalAudio.md @@ -100,10 +100,9 @@ ___ ▸ **start**(): `Promise`<`void`\> -Starts sending the local participant’s audio streams from the local client to the conference. -This method is not available for listeners in Dolby Voice conferences. +Starts sending the local participant’s audio stream to a conference. The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. -The method requires up to a few seconds to become effective. +The method requires a few seconds to become effective. #### Returns @@ -115,8 +114,7 @@ ___ ▸ **stop**(): `Promise`<`void`\> -Stops sending local participant’s audio from the local client to the conference. -This method is not available for listeners in Dolby Voice conferences. +Stops sending local participant’s audio to a conference. The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. The method requires up to a few seconds to become effective. diff --git a/docs/classes/internal.LocalVideo.md b/docs/classes/internal.LocalVideo.md index df27b5b4..f04f7d49 100644 --- a/docs/classes/internal.LocalVideo.md +++ b/docs/classes/internal.LocalVideo.md @@ -29,7 +29,7 @@ This model is supported only in SDK 3.7 and later. ▸ **start**(): `Promise`<`void`\> -Enables the local participant's video and sends the video to a conference. +Enables the local participant's video and sends the video to a conference. 
The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. #### Returns @@ -41,7 +41,8 @@ ___ ▸ **stop**(): `Promise`<`void`\> -Disables the local participant's video and stops sending the video to a conference. +Disables the local participant's video and stops sending the video to a conference. The method is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. + Use this method only when the current participant is at the conference. #### Returns diff --git a/docs/classes/internal.MediaDeviceService.md b/docs/classes/internal.MediaDeviceService.md index b9f233cf..007124ab 100644 --- a/docs/classes/internal.MediaDeviceService.md +++ b/docs/classes/internal.MediaDeviceService.md @@ -12,9 +12,7 @@ The MediaDeviceService allows an application to manage media devices that are us ### Methods -- [getComfortNoiseLevel](internal.MediaDeviceService.md#getcomfortnoiselevel) - [isFrontCamera](internal.MediaDeviceService.md#isfrontcamera) -- [setComfortNoiseLevel](internal.MediaDeviceService.md#setcomfortnoiselevel) - [switchCamera](internal.MediaDeviceService.md#switchcamera) - [switchSpeaker](internal.MediaDeviceService.md#switchspeaker) @@ -26,20 +24,6 @@ The MediaDeviceService allows an application to manage media devices that are us ## Methods -### getComfortNoiseLevel - -▸ **getComfortNoiseLevel**(): `Promise`<[`ComfortNoiseLevel`](../enums/internal.ComfortNoiseLevel.md)\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the [getComfortNoiseLevel](doc:rn-client-sdk-references-localaudio#getcomfortnoiselevel) method available in the [LocalAudio](doc:rn-client-sdk-references-localaudio) model - -Retrieves the comfort noise level setting for output devices in Dolby Voice conferences. - -#### Returns - -`Promise`<[`ComfortNoiseLevel`](../enums/internal.ComfortNoiseLevel.md)\> - -___ - ### isFrontCamera ▸ **isFrontCamera**(): `Promise`<`boolean`\> @@ -52,26 +36,6 @@ Checks whether an application uses the front-facing (true) or back-facing camera ___ -### setComfortNoiseLevel - -▸ **setComfortNoiseLevel**(`noiseLevel`): `Promise`<`void`\> - -**Note**: This method is deprecated in SDK 3.7 and replaced with the [setComfortNoiseLevel](doc:rn-client-sdk-references-localaudio#setcomfortnoiselevel) method available in the [LocalAudio](doc:rn-client-sdk-references-localaudio) model. - -Configures the comfort noise level for output devices in Dolby Voice conferences. - -#### Parameters - -| Name | Type | Description | -| :------ | :------ | :------ | -| `noiseLevel` | [`ComfortNoiseLevel`](../enums/internal.ComfortNoiseLevel.md) | The selected comfort noise level. | - -#### Returns - -`Promise`<`void`\> - -___ - ### switchCamera ▸ **switchCamera**(): `Promise`<`void`\> diff --git a/docs/classes/internal.RemoteAudio.md b/docs/classes/internal.RemoteAudio.md index f647acfc..f5c20d3f 100644 --- a/docs/classes/internal.RemoteAudio.md +++ b/docs/classes/internal.RemoteAudio.md @@ -16,6 +16,8 @@ This model is supported only in SDK 3.7 and later. - [start](internal.RemoteAudio.md#start) - [stop](internal.RemoteAudio.md#stop) +- [setParticipantVolume](internal.RemoteAudio.md#setparticipantvolume) +- [setAllParticipantsVolume](internal.RemoteAudio.md#setallparticipantsvolume) ## Constructors @@ -29,9 +31,9 @@ This model is supported only in SDK 3.7 and later. 
▸ **start**(`participant`): `Promise`<`void`\> -Allows the local participant to unmute a specific remote participant who is locally muted through the stop method. The start method does not impact audio transmission between remote participants and a conference and does not allow the local participant to force sending remote participants’ streams to the conference or to the local participant. This method is not available for listeners in Dolby Voice conferences. +Allows the local participant to start receiving audio from a specific remote participant. The start method does not impact audio transmission between remote participants and a conference and does not allow the local participant to force sending remote participants’ streams to the conference or to the local participant. The method requires a few seconds to become effective. -The start method requires up to a few seconds to become effective. +In Dolby Voice conferences and in the case of [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners, the method works as [mute](doc:rn-client-sdk-conferenceservice#mute), so it locally starts playing audio from a remote participant. #### Parameters @@ -49,9 +51,9 @@ ___ ▸ **stop**(`participant`): `Promise`<`void`\> -Allows the local participant to locally mute specific remote participants. This method does not impact audio transmission between remote participants and a conference and does not allow the local participant to stop sending remote participants’ streams to the conference. This method is not available for listeners in Dolby Voice conferences. +Allows the local participant to stop receiving audio from specific remote participants. This method does not impact audio transmission between remote participants and a conference and does not allow the local participant to stop sending remote participants’ streams to the conference. The method requires up to a few seconds to become effective. -The stop method requires up to a few seconds to become effective. +In Dolby Voice conferences and in the case of [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners, the method works as [mute](doc:rn-client-sdk-conferenceservice#mute), so it locally stops playing audio while the audio stream is still received. #### Parameters @@ -62,3 +64,47 @@ The stop method requires up to a few seconds to become effective. #### Returns `Promise`<`void`\> + +___ + +### setParticipantVolume + +▸ **setParticipantVolume**(`participant`, `volume`): `Promise`<`void`\> + +Sets the volume of a selected participant in non-Dolby Voice conferences to a preferred value between 0 and 1. +Providing an unsupported number results in constraining the volume to either 0 or 1. +Using the method for a selected participant after calling setOutputVolume overwrites the participant's volume. + +This method is supported in SDK 3.11 and later. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `participant` | [`Participant`](../interfaces/internal.Participant.md) | The selected remote participant. | +| `volume` | `number` | The preferred volume level between 0 (no audio) and 1 (full volume). | + +#### Returns + +`Promise`<`void`\> + +___ + +### setAllParticipantsVolume + +▸ **setAllParticipantsVolume**(`volume`): `Promise`<`void`\> + +Sets the conference volume for the local participant. The method sets the volume of all remote participants to a preferred value between 0 and 1. +Providing an unsupported volume results in constraining volume to a either 0 or 1. 
+ +This method is supported in SDK 3.11 and later. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `volume` | `number` | The preferred volume level between 0 (no audio) and 1 (full volume). | + +#### Returns + +`Promise`<`void`\> diff --git a/docs/classes/internal.RemoteVideo.md b/docs/classes/internal.RemoteVideo.md index 3c2618b9..80b94409 100644 --- a/docs/classes/internal.RemoteVideo.md +++ b/docs/classes/internal.RemoteVideo.md @@ -31,7 +31,7 @@ This model is supported only in SDK 3.7 and later. If the local participant used the stop method to stop receiving video streams from selected remote participants, the start method allows the participant to start receiving video streams from these participants. The start method does not impact the video transmission between remote participants and a conference and does not allow the local participant to force sending remote participants’ streams to the conference or to the local participant. -The start method requires a few seconds to become effective. +The start method requires a few seconds to become effective. The method is not available for [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners. #### Parameters @@ -51,7 +51,7 @@ ___ Allows the local participant to stop receiving video from specific remote participants. This method does not impact audio transmission between remote participants and a conference and does not allow the local participant to stop sending remote participants’ streams to the conference. -The stop method requires a few seconds to become effective. +The stop method requires a few seconds to become effective. The method is not available for [mixed](doc:rn-client-sdk-enums-listentype#mixed) listeners. #### Parameters diff --git a/docs/classes/internal.VideoPresentationService.md b/docs/classes/internal.VideoPresentationService.md index b1d89c75..60304cd8 100644 --- a/docs/classes/internal.VideoPresentationService.md +++ b/docs/classes/internal.VideoPresentationService.md @@ -18,6 +18,8 @@ The VideoPresentationService allows sharing videos during a conference. To prese **6.** The presenter calls the [stop](#stop) method to stop the video presentation. In such a situation, all conference participants receive this information via the [onVideoPresentationStopped](#onvideopresentationstopped) listener. +The service is available only to participants who joined a conference using the [join](doc:rn-client-sdk-conferenceservice#join) method; it is not available for listeners. + ## Table of contents ### Constructors diff --git a/docs/enums/internal.ListenType.md b/docs/enums/internal.ListenType.md new file mode 100644 index 00000000..688c4dd8 --- /dev/null +++ b/docs/enums/internal.ListenType.md @@ -0,0 +1,28 @@ +# Enumeration: ListenType + +[internal](../modules/internal.md).ListenType + +The ListenType model gathers the possible types of listeners. This model is available in SDK 3.11 and later. + +## Table of contents + +### Enumeration Members + +- [REGULAR](internal.ListenType.md#regular) +- [MIXED](internal.ListenType.md#mixed) + +## Enumeration Members + +### REGULAR + +• **REGULAR** = ``"REGULAR"`` + +A regular listener who receives one mixed audio stream from a conference and one video stream from each participant who sends video to a conference. + +___ + +### MIXED + +• **MIXED** = ``"MIXED"`` + +A participant who receives one mixed audio stream and one mixed video stream from a conference, which increases the conference capacity. 
The platform can support up to 60,000 mixed listeners while maintaining under half a second of latency. diff --git a/docs/enums/internal.ParticipantType.md b/docs/enums/internal.ParticipantType.md index 6930479a..242b781e 100644 --- a/docs/enums/internal.ParticipantType.md +++ b/docs/enums/internal.ParticipantType.md @@ -10,6 +10,7 @@ The ParticipantTypes enum gathers the possible types of conference participants. - [LISTENER](internal.ParticipantType.md#listener) - [USER](internal.ParticipantType.md#user) +- [MIXER\_MIX](internal.ParticipantType.md#mixer_mix) - [UNKNOWN](internal.ParticipantType.md#unknown) ## Enumeration Members @@ -30,6 +31,14 @@ A participant who can send and receive audio and video during the conference. ___ +### MIXER\_MIX + +• **MIXER\_MIX** = ``"MIXER_MIX"`` + +A special participant responsible for mixing video and sending one mixed video stream from a conference to each participant who joined the conference as a mixed listener. This type is available in SDK 3.11 and later. + +___ + ### UNKNOWN • **UNKNOWN** = ``"UNKNOWN"`` diff --git a/docs/interfaces/internal.ConferenceJoinOptions.md b/docs/interfaces/internal.ConferenceJoinOptions.md index 808144a3..57cd9c85 100644 --- a/docs/interfaces/internal.ConferenceJoinOptions.md +++ b/docs/interfaces/internal.ConferenceJoinOptions.md @@ -56,7 +56,7 @@ ___ • `Optional` **preferRecvMono**: `boolean` -Indicates whether a participant wants to receive mono sound. By default, participants receive stereo audio. This configuration is only applicable when using the Opus codec and is available in non-Dolby Voice and Dolby Voice conferences. +Indicates whether a participant wants to receive mono sound. By default, the property is set to false. ___ @@ -64,7 +64,7 @@ ___ • `Optional` **preferSendMono**: `boolean` -Indicates whether a participant wants to send mono sound to a conference. By default, when using the Opus codec, participants' audio is sent as stereo. This configuration is only applicable when using the Opus codec and is available in non-Dolby Voice and Dolby Voice conferences. +Indicates whether a participant wants to send mono sound to a conference. By default, the property is set to true in most cases. The only situation when the property is set to false is when you set the [capture mode](doc:rn-client-sdk-references-localaudio#setcapturemode) to Unprocessed before joining a conference while using Opus. ___ diff --git a/docs/interfaces/internal.ConferenceListenOptions.md b/docs/interfaces/internal.ConferenceListenOptions.md index b76db472..ec4542ac 100644 --- a/docs/interfaces/internal.ConferenceListenOptions.md +++ b/docs/interfaces/internal.ConferenceListenOptions.md @@ -12,6 +12,7 @@ The ConferenceListenOptions interface defines how the application expects to joi - [maxVideoForwarding](internal.ConferenceListenOptions.md#maxvideoforwarding) - [spatialAudio](internal.ConferenceListenOptions.md#spatialaudio) - [videoForwardingStrategy](internal.ConferenceListenOptions.md#videoforwardingstrategy) +- [listenType](internal.ConferenceListenOptions.md#listentype) ## Properties @@ -51,3 +52,11 @@ ___ • `Optional` **videoForwardingStrategy**: [`VideoForwardingStrategy`](../enums/internal.VideoForwardingStrategy.md) Changes the video forwarding strategy for the local participant. This option is available only in SDK 3.6 and later. 
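A short sketch of how these options combine with the `listenType` property documented below when joining as a listener; it mirrors the example-app change later in this diff and assumes the `CommsAPI` default export and the `Conference` model type:

```typescript
import CommsAPI from '@dolbyio/comms-sdk-react-native';
import { ListenType } from '@dolbyio/comms-sdk-react-native/models';
import type { Conference, ConferenceListenOptions } from '@dolbyio/comms-sdk-react-native/models';

// Join an already created conference as a mixed listener (SDK 3.11 and later).
async function listenAsMixedListener(conference: Conference): Promise<Conference> {
  const options: ConferenceListenOptions = {
    maxVideoForwarding: 4,
    spatialAudio: false,
    // videoForwardingStrategy could also be set here (SDK 3.6 and later).
    listenType: ListenType.MIXED,
  };
  return CommsAPI.conference.listen(conference, options);
}
```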
+ +___ + +### listenType + +• `Optional` **listenType**: [`ListenType`](../enums/internal.ListenType.md) + +The listener type that indicates whether a participant wishes to join a conference as a regular listener or a mixed listener. This property is available in SDK 3.11 and later. diff --git a/docs/interfaces/internal.Participant.md b/docs/interfaces/internal.Participant.md index bc4d9c5a..196acace 100644 --- a/docs/interfaces/internal.Participant.md +++ b/docs/interfaces/internal.Participant.md @@ -20,7 +20,7 @@ The Participant interface gathers information about a conference participant. • **id**: `string` -The participant's ID. +The participant ID. ___ @@ -36,7 +36,7 @@ ___ • `Optional` **status**: [`ParticipantStatus`](../enums/internal.ParticipantStatus.md) -The participant's status. +The participant status. ___ @@ -52,4 +52,4 @@ ___ • `Optional` **type**: [`ParticipantType`](../enums/internal.ParticipantType.md) -The participant's type. +The participant type. diff --git a/docs/interfaces/internal.VideoForwardingOptions.md b/docs/interfaces/internal.VideoForwardingOptions.md index 5ed0f9b6..e04837e9 100644 --- a/docs/interfaces/internal.VideoForwardingOptions.md +++ b/docs/interfaces/internal.VideoForwardingOptions.md @@ -22,7 +22,7 @@ The VideoForwardingOptions model allows configuring the Video Forwarding functio • `Optional` **max**: `number` -The maximum number of video streams that may be transmitted to the local participant. The valid values are between 0 and 25. The default value is 4. In the case of providing a value smaller than 0 or greater than 25, SDK triggers an error. +The maximum number of video streams that may be transmitted to the local participant. The valid values are between 0 and 49. The default value is 4. In the case of providing a value smaller than 0 or greater than 49, SDK triggers an error. 
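To tie this limit back to the `videoForwarding` method documented earlier in this changeset, a minimal sketch (again assuming the `CommsAPI.conference` entry point used by the example app) could look like:

```typescript
import CommsAPI from '@dolbyio/comms-sdk-react-native';

// Ask the SDK to forward at most six remote video streams to the local
// participant; values outside the 0-49 range make the SDK reject the call.
async function limitForwardedVideos(): Promise<void> {
  try {
    await CommsAPI.conference.videoForwarding({ max: 6 });
  } catch (e) {
    console.warn('videoForwarding failed:', e);
  }
}
```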
___ diff --git a/docs/modules/internal.md b/docs/modules/internal.md index 9c766aae..181e6781 100644 --- a/docs/modules/internal.md +++ b/docs/modules/internal.md @@ -82,6 +82,7 @@ - [MediaStreamType](../enums/internal.MediaStreamType.md) - [SpatialAudioStyle](../enums/internal.SpatialAudioStyle.md) - [VideoForwardingStrategy](../enums/internal.VideoForwardingStrategy.md) +- [ListenType](../enums/internal.ListenType.md) - [RecordingStatus](../enums/internal.RecordingStatus.md) - [VideoPresentationState](../enums/internal.VideoPresentationState.md) diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 39097043..02656537 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -333,9 +333,9 @@ PODS: - React-jsinspector (0.71.3) - React-logger (0.71.3): - glog - - react-native-comms-sdk (3.10.0): + - react-native-comms-sdk (3.11.0-beta.1): - React-Core - - VoxeetSDK (~> 3.10.1) + - VoxeetSDK (~> 3.11.0) - react-native-document-picker (7.1.3): - React-Core - react-native-safe-area-context (3.4.1): @@ -458,7 +458,7 @@ PODS: - ReactCommon/turbomodule/core - Yoga - SocketRocket (0.6.0) - - VoxeetSDK (3.10.1) + - VoxeetSDK (3.11.0) - Yoga (1.14.0) - YogaKit (1.18.1): - Yoga (~> 1.14) @@ -676,7 +676,7 @@ SPEC CHECKSUMS: React-jsiexecutor: 515b703d23ffadeac7687bc2d12fb08b90f0aaa1 React-jsinspector: 9f7c9137605e72ca0343db4cea88006cb94856dd React-logger: 957e5dc96d9dbffc6e0f15e0ee4d2b42829ff207 - react-native-comms-sdk: ce52cf67ff732bc3166459f26462c5bed4ffed2e + react-native-comms-sdk: 567b8e494852aac4301685135a251834812f609a react-native-document-picker: ec07866a30707f23660c0f3ae591d669d3e89096 react-native-safe-area-context: 9e40fb181dac02619414ba1294d6c2a807056ab9 React-perflogger: af8a3d31546077f42d729b949925cc4549f14def @@ -697,10 +697,10 @@ SPEC CHECKSUMS: RNPermissions: 314155ed6ce65237e7bd9fb6239219cce83228d3 RNReanimated: cc5e3aa479cb9170bcccf8204291a6950a3be128 SocketRocket: fccef3f9c5cedea1353a9ef6ada904fde10d6608 - VoxeetSDK: bc2f2566624c2ad13dfb0fa4498020e7c15e85fa + VoxeetSDK: 04a92e5ff875404191f93de89557cf8250b81f7e Yoga: 5ed1699acbba8863755998a4245daa200ff3817b YogaKit: f782866e155069a2cca2517aafea43200b01fd5a PODFILE CHECKSUM: 20a3e54e5b6e278732f5d890081ed875e8df52ce -COCOAPODS: 1.11.3 +COCOAPODS: 1.12.1 diff --git a/example/package.json b/example/package.json index f5171d48..1ad936f4 100644 --- a/example/package.json +++ b/example/package.json @@ -1,7 +1,7 @@ { "name": "react-native-comms-sdk-example", "description": "Example app for react-native-dolbyio-sdk", - "version": "3.10.1+1", + "version": "3.11.0-beta.1+1", "license": "MIT", "private": true, "scripts": { diff --git a/example/src/components/DolbyIOProvider/DolbyIOProvider.tsx b/example/src/components/DolbyIOProvider/DolbyIOProvider.tsx index 99f23a61..9a46a04b 100644 --- a/example/src/components/DolbyIOProvider/DolbyIOProvider.tsx +++ b/example/src/components/DolbyIOProvider/DolbyIOProvider.tsx @@ -18,9 +18,11 @@ import type { UnsubscribeFunction, ConferenceCreateParameters, ConferenceServiceEventNames, + ConferenceListenOptions, } from '@dolbyio/comms-sdk-react-native/models'; import { Codec, + ListenType, RTCPMode, SpatialAudioStyle, SubscriptionType @@ -36,22 +38,25 @@ export interface IDolbyIOProvider { conference?: Conference; conferenceStatus?: ConferenceStatus; participants: Participant[]; + isBottomSheetVisible: Boolean initialize: (token: string, refreshToken: () => Promise) => void; openSession: (name: string, externalId?: string) => Promise; closeSession: () => Promise; isOpen: () => 
Promise; createAndJoin: (alias: string, params: ConferenceCreateParameters) => void; - listen: (alias: string) => void; + listen: (alias: string, listenType?: ListenType) => void; joinWithId: (conferenceId: string) => void; replay: () => void; getCurrentConference: () => void; goToAudioPreviewScreen: (isVisible: boolean) => void; leave: (leaveRoom: boolean) => void; setSessionParticipant: () => void; + setBottomSheetVisibility: (isVisible: boolean) => void; } export const DolbyIOContext = React.createContext({ isInitialized: false, + isBottomSheetVisible: false, me: undefined, conference: undefined, conferenceStatus: undefined, @@ -69,6 +74,7 @@ export const DolbyIOContext = React.createContext({ getCurrentConference: () => {}, goToAudioPreviewScreen: () => {}, setSessionParticipant: () => {}, + setBottomSheetVisibility: () => {}, }); type DolbyProps = { @@ -76,6 +82,7 @@ type DolbyProps = { }; const DolbyIOProvider: React.FC = ({ children }) => { + const [isBottomSheetVisible, setIsBottomSheetVisible] = useState(false); const [isInitialized, setIsInitialized] = useState(false); const [me, setMe] = useState(undefined); const [isAudioPreviewScreen, setIsAudioPreviewScreen] = useState(false); @@ -160,14 +167,9 @@ const DolbyIOProvider: React.FC = ({ children }) => { } const openSession = async (name: string, externalId?: string) => { - const timeoutPromise = setTimeout(() => { - CommsAPI.session.close(); - }, 5000); try { await CommsAPI.session.open({ name, externalId }); - clearTimeout(timeoutPromise); } catch (e: any) { - clearTimeout(timeoutPromise); Alert.alert('Session not opened', e.toString()); } }; @@ -175,7 +177,6 @@ const DolbyIOProvider: React.FC = ({ children }) => { const closeSession = async () => { try { await CommsAPI.session.close(); - setIsInitialized(false); } catch (e: any) { Alert.alert('Session not opened', e.toString()); } @@ -263,7 +264,7 @@ const DolbyIOProvider: React.FC = ({ children }) => { } }; - const listen = async (alias: string) => { + const listen = async (alias: string, listenType: ListenType = ListenType.REGULAR) => { try { const conferenceParams = { rtcpMode: RTCPMode.AVERAGE, @@ -280,9 +281,10 @@ const DolbyIOProvider: React.FC = ({ children }) => { conferenceOptions ); - const listenOptions = { + const listenOptions: ConferenceListenOptions = { maxVideoForwarding: 4, spatialAudio: false, + listenType: listenType }; const joinedConference = await CommsAPI.conference.listen( createdConference, @@ -347,16 +349,7 @@ const DolbyIOProvider: React.FC = ({ children }) => { const leave = async (leaveRoom: boolean) => { try { - const conferenceLeaveOptions = { - leaveRoom, - }; - await CommsAPI.conference.leave(conferenceLeaveOptions); - leaveActions(); - if (leaveRoom) { - setMe(undefined); - } - - CommsAPI.notification.unsubscribe( + await CommsAPI.notification.unsubscribe( [ SubscriptionType.ActiveParticipants, SubscriptionType.ConferenceCreated, @@ -366,6 +359,15 @@ const DolbyIOProvider: React.FC = ({ children }) => { SubscriptionType.ParticipantLeft ].map( (s) => { return { type: s, conferenceAlias: conference?.alias ?? 
"" } }) ); + + const conferenceLeaveOptions = { + leaveRoom, + }; + await CommsAPI.conference.leave(conferenceLeaveOptions); + leaveActions(); + if (leaveRoom) { + setMe(undefined); + } } catch (e: any) { Alert.alert('Conference leave with errors', e); leaveActions(); @@ -449,8 +451,13 @@ const DolbyIOProvider: React.FC = ({ children }) => { setIsAudioPreviewScreen(isVisible); } + const setBottomSheetVisibility = (isVisible: boolean) => { + setIsBottomSheetVisible(isVisible); + } + const contextValue = { isInitialized, + isBottomSheetVisible, isAudioPreviewScreen, me, conference, @@ -468,6 +475,7 @@ const DolbyIOProvider: React.FC = ({ children }) => { getCurrentConference, goToAudioPreviewScreen, setSessionParticipant, + setBottomSheetVisibility, }; return ( diff --git a/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.style.ts b/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.style.ts index 9a893ebb..37b0b6da 100644 --- a/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.style.ts +++ b/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.style.ts @@ -27,4 +27,7 @@ export default StyleSheet.create({ backgroundColor: COLORS.BLUE, borderRadius: SPACE_L, }, + center: { + alignItems: 'center', + }, }); diff --git a/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.tsx b/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.tsx index 592b7ec9..50b8ab3d 100644 --- a/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.tsx +++ b/example/src/screens/AudioPreviewScreen/AudioPreviewScreen.tsx @@ -150,7 +150,7 @@ const AudioPreviewScreen: FunctionComponent = () => { > - + Dolby.io diff --git a/example/src/screens/ConferenceScreen/ConferenceScreen.style.ts b/example/src/screens/ConferenceScreen/ConferenceScreen.style.ts index 3227cb9b..c6ed855a 100644 --- a/example/src/screens/ConferenceScreen/ConferenceScreen.style.ts +++ b/example/src/screens/ConferenceScreen/ConferenceScreen.style.ts @@ -25,7 +25,7 @@ export default StyleSheet.create({ top: {}, topBar: { flexDirection: 'row', - justifyContent: 'space-between', + justifyContent: 'flex-end', alignItems: 'center', }, center: { @@ -87,7 +87,17 @@ export default StyleSheet.create({ elevation: SHADOWS.m.elevation, flexDirection: 'column', }, - + volumeModalContainer: { + backgroundColor: 'lightgray', + borderRadius: SPACE_XXS, + width: '80%', + height: '30%', + shadowOffset: SHADOWS.m.shadowOffset, + shadowOpacity: SHADOWS.m.shadowOpacity, + shadowRadius: SHADOWS.m.shadowRadius, + elevation: SHADOWS.m.elevation, + flexDirection: 'column', + }, modalTitleSection: { flex: 1, }, @@ -130,6 +140,7 @@ export default StyleSheet.create({ }, centerButtons: { flexDirection: 'row', + marginBottom: 20, }, videoButtonGreen: { backgroundColor: COLORS.GREEN, diff --git a/example/src/screens/ConferenceScreen/ConferenceScreen.tsx b/example/src/screens/ConferenceScreen/ConferenceScreen.tsx index f2a53a4d..9d643b13 100644 --- a/example/src/screens/ConferenceScreen/ConferenceScreen.tsx +++ b/example/src/screens/ConferenceScreen/ConferenceScreen.tsx @@ -1,10 +1,8 @@ import React, { FunctionComponent, useContext, useMemo, useState } from 'react'; import { Image, TouchableOpacity, View } from 'react-native'; -import { ScrollView } from 'react-native-gesture-handler'; import LinearGradient from 'react-native-linear-gradient'; import { MenuProvider } from 'react-native-popup-menu'; import { SafeAreaView } from 'react-native-safe-area-context'; - import { DolbyIOContext } from '@components/DolbyIOProvider'; import { 
FilePresentationContext } from '@components/FilePresentationHandler'; import { RecordingContext } from '@components/RecordingProvider'; @@ -15,12 +13,10 @@ import VideoGallery from '@screens/ConferenceScreen/VideoGallery'; import Space from '@ui/Space'; import Text from '@ui/Text'; import { mute, unmute } from '@utils/conference.tester'; - -import { ParticipantStatus, Participant } from '@dolbyio/comms-sdk-react-native/models'; +import { ParticipantStatus } from '@dolbyio/comms-sdk-react-native/models'; import styles from './ConferenceScreen.style'; import ConferenceScreenBottomSheet from './ConferenceScreenBottomSheet'; import MessageModal from './MessageModal'; -import ParticipantAvatar from './ParticipantAvatar'; import { startLocalVideo, stopLocalVideo } from '@utils/video.tester'; const DISPLAYED_STATUSES: ParticipantStatus[] = [ @@ -29,7 +25,7 @@ const DISPLAYED_STATUSES: ParticipantStatus[] = [ ]; const ConferenceScreen: FunctionComponent = () => { - const { me, conference, participants } = useContext(DolbyIOContext); + const { me, conference, participants, isBottomSheetVisible, setBottomSheetVisibility } = useContext(DolbyIOContext); const { isRecording } = useContext(RecordingContext); const { fileSrc, isPresentingFile, fileOwnerName } = useContext( FilePresentationContext @@ -38,6 +34,8 @@ const ConferenceScreen: FunctionComponent = () => { const [scaleType, setScaleType] = useState<'fill' | 'fit'>('fill'); const [isMessageModalActive, setIsMessageModalActive] = useState(false); + const [isVideoOn, setIsVideoOn] = useState(false); + const [isMuted, setIsMuted] = useState(false); const connectedParticipants = useMemo(() => { return participants.filter( @@ -49,6 +47,24 @@ const ConferenceScreen: FunctionComponent = () => { return ; } + const onPressVideoButton = () => { + if (isVideoOn) { + stopLocalVideo(); + } else { + startLocalVideo(); + } + setIsVideoOn(!isVideoOn); + }; + + const onPressMuteButton = () => { + if (isMuted) { + unmute(me); + } else { + mute(me); + } + setIsMuted(!isMuted); + }; + return ( @@ -60,19 +76,25 @@ const ConferenceScreen: FunctionComponent = () => { - - Logged as: {me.info.name} + + + + Conference: {conference.alias} + + - - Conference: {conference.alias} - {isRecording ? ( ) : null} + + + - {isPresentingFile && fileSrc ? ( @@ -86,110 +108,55 @@ const ConferenceScreen: FunctionComponent = () => { ) : null} - - - setIsMessageModalActive(true)} - > - - SEND MSG - - - - - { - mute(me); - }} - > - - MUTE ME - - - - - { - unmute(me); - }} - > - - UNMUTE ME - - - - - startLocalVideo()} - > - - START VIDEO - - - - - stopLocalVideo()} - > - - STOP VIDEO - - - - - { - setScaleType(scaleType === 'fill' ? 'fit' : 'fill'); - }} - > - - FILL/FIT - - - - + + onPressMuteButton()} + > + + {isMuted ? 'UNMUTE ME' : 'MUTE ME'} + + + + + onPressVideoButton()} + > + + {isVideoOn ? 'STOP VIDEO' : 'START VIDEO'} + + + + + { + setScaleType(scaleType === 'fill' ? 'fit' : 'fill'); + }} + > + + FILL/FIT + + + + + { + setBottomSheetVisibility(true); + }} + > + + TEST BUTTONS + + + - - - {`Participants (${connectedParticipants.length})`} - - - - - {connectedParticipants.map((p: Participant) => ( - - ))} - - - - - - - - + {isBottomSheetVisible ? 
() : null} { const bottomSheetRef = useRef(null); - const { me, conference } = useContext(DolbyIOContext); + const { me, conference, setBottomSheetVisibility } = useContext(DolbyIOContext); const { startRecord, stopRecord } = useContext(RecordingContext); const { participants } = conference as Conference; let convertedFile: FileConverted | null = null; @@ -160,8 +161,8 @@ const ConferenceScreenBottomSheet = () => { }, ]; - const voiceFontAction: (voiceFont: VoiceFont) => () => void = (voiceFont) => { - return () => setAudioCaptureMode({ mode: AudioCaptureMode.Standard, noiseReduction: NoiseReductionLevel.Low, voiceFont: voiceFont }) + const voiceFontAction: (voiceFont: VoiceFont) => () => void = (voiceFont) => { + return () => setAudioCaptureMode({ mode: AudioCaptureMode.Standard, noiseReduction: NoiseReductionLevel.Low, voiceFont: voiceFont }) }; const voiceFontOptions: Array<{ text: string; @@ -228,7 +229,7 @@ const ConferenceScreenBottomSheet = () => { }; return ( - + setBottomSheetVisibility(false)}> @@ -345,17 +346,54 @@ const ConferenceScreenBottomSheet = () => {