diff --git a/.editorconfig b/.editorconfig index b8777ff..139e176 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,5 +1,8 @@ [*.{kt,kts}] -ktlint_disabled_rules = no-wildcard-imports,filename +ktlint_standard_no-wildcard-imports = disabled +ktlint_standard_filename = disabled +ktlint_standard_function-naming = disabled +ktlint_standard_property-naming = disabled ij_kotlin_allow_trailing_comma = false ij_kotlin_allow_trailing_comma_on_call_site = false ij_kotlin_imports_layout = *,java.**,javax.**,kotlin.**,^ @@ -7,7 +10,7 @@ ij_kotlin_packages_to_use_import_on_demand = java.util.*,kotlinx.android.synthet indent_size = 4 indent_style = space insert_final_newline = true -ktlint_code_style = official +ktlint_code_style = ktlint_official ktlint_ignore_back_ticked_identifier = false ktlint_standard_no-wildcard-imports= disabled max_line_length = 120 diff --git a/.github/workflows/run_lint_and_tests.yaml b/.github/workflows/run_lint_and_tests.yaml index 4ba1eb9..0dae1a6 100644 --- a/.github/workflows/run_lint_and_tests.yaml +++ b/.github/workflows/run_lint_and_tests.yaml @@ -8,7 +8,7 @@ jobs: - uses: actions/checkout@v3 - name: Download ktlint - run: curl -sSLO https://github.com/pinterest/ktlint/releases/download/0.47.1/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/ + run: curl -sSLO https://github.com/pinterest/ktlint/releases/download/1.1.1/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/ - name: Run linter run: ktlint **/*.kt diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTC.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTC.kt index d47a64b..92b1ec4 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTC.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTC.kt @@ -35,224 +35,241 @@ import org.webrtc.Logging * or going inactive. */ class MembraneRTC -private constructor( - private var client: InternalMembraneRTC -) { - /** - * Tries to connect the RTC Engine. If user is accepted then onConnected will be called. - * In other case {@link Callbacks.onConnectError} is invoked. - *

- * @param endpointMetadata - Any information that other endpoints will receive in onEndpointAdded - * after accepting this endpoint - */ - fun connect(endpointMetadata: Metadata) { - client.connect(endpointMetadata) - } - - /** - * Disconnects the client. - *

- * Once the client gets disconnected it should not be reused. New client should be created instead. - */ - fun disconnect() { - client.disconnect() - } + private constructor( + private var client: InternalMembraneRTC + ) { + /** + * Tries to connect to the RTC Engine. If the user is accepted, onConnected will be called; + * otherwise {@link Callbacks.onConnectError} is invoked. + *

+ * @param endpointMetadata - Any information that other endpoints will receive in onEndpointAdded + * after accepting this endpoint + */ + fun connect(endpointMetadata: Metadata) { + client.connect(endpointMetadata) + } - /** - * Feeds media event received from RTC Engine to MembraneWebRTC. - * This function should be called whenever some media event from RTC Engine - * was received and can result in MembraneWebRTC generating some other - * media events. - * @param mediaEvent - String data received over custom signalling layer. - */ - fun receiveMediaEvent(mediaEvent: SerializedMediaEvent) { - client.receiveMediaEvent(mediaEvent) - } + /** + * Disconnects the client. + *

+ * Once the client gets disconnected it should not be reused. A new client should be created instead. + */ + fun disconnect() { + client.disconnect() + } - /** - * Creates a video track utilizing device's camera. - *

- * The client assumes that the user has already granted camera permissions. - * - * @param videoParameters a set of target parameters such as camera resolution, frame rate or simulcast configuration - * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation - * @param captureDeviceName the name of the device to start video capture with, you can get device name by using - * `LocalVideoTrack.getCaptureDevices` method - * @return an instance of the video track - */ - fun createVideoTrack( - videoParameters: VideoParameters, - metadata: Metadata, - captureDeviceName: String? = null - ): LocalVideoTrack { - return client.createLocalVideoTrack(videoParameters, metadata, captureDeviceName) - } + /** + * Feeds media event received from RTC Engine to MembraneWebRTC. + * This function should be called whenever some media event from RTC Engine + * was received and can result in MembraneWebRTC generating some other + * media events. + * @param mediaEvent - String data received over custom signalling layer. + */ + fun receiveMediaEvent(mediaEvent: SerializedMediaEvent) { + client.receiveMediaEvent(mediaEvent) + } - /** - * Creates an audio track utilizing device's microphone. - *

- * The client assumes that the user has already granted microphone recording permissions. - * - * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation - * @return an instance of the audio track - */ - fun createAudioTrack(metadata: Metadata): LocalAudioTrack { - return client.createLocalAudioTrack(metadata) - } + /** + * Creates a video track utilizing device's camera. + *

+ * The client assumes that the user has already granted camera permissions. + * + * @param videoParameters a set of target parameters such as camera resolution, frame rate or simulcast configuration + * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation + * @param captureDeviceName the name of the device to start video capture with, you can get device name by using + * `LocalVideoTrack.getCaptureDevices` method + * @return an instance of the video track + */ + fun createVideoTrack( + videoParameters: VideoParameters, + metadata: Metadata, + captureDeviceName: String? = null + ): LocalVideoTrack { + return client.createLocalVideoTrack(videoParameters, metadata, captureDeviceName) + } - /** - * Creates a screen track recording the entire device's screen. - *

- * The method requires a media projection permission to be able to start the recording. The client assumes that the intent is valid. - * - * @param mediaProjectionPermission a valid media projection permission intent that can be used to starting a screen capture - * @param videoParameters a set of target parameters of the screen capture such as resolution, frame rate or simulcast configuration - * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation - * @param onEnd callback that will be invoked once the screen capture ends - * @return an instance of the screencast track - */ - fun createScreencastTrack( - mediaProjectionPermission: Intent, - videoParameters: VideoParameters, - metadata: Metadata, - onEnd: (() -> Unit)? = null - ): LocalScreencastTrack { - return client.createScreencastTrack(mediaProjectionPermission, videoParameters, metadata, onEnd) - } + /** + * Creates an audio track utilizing device's microphone. + *

+ * The client assumes that the user has already granted microphone recording permissions. + * + * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation + * @return an instance of the audio track + */ + fun createAudioTrack(metadata: Metadata): LocalAudioTrack { + return client.createLocalAudioTrack(metadata) + } - /** - * Removes an instance of local track from the client. - * - * @param trackId an id of a valid local track that has been created using the current client - * @return a boolean whether the track has been successfully removed or not - */ - fun removeTrack(trackId: String): Boolean { - return client.removeTrack(trackId) - } + /** + * Creates a screen track recording the entire device's screen. + *

+ * The method requires a media projection permission to be able to start the recording. The client assumes that the intent is valid. + * + * @param mediaProjectionPermission a valid media projection permission intent that can be used to starting a screen capture + * @param videoParameters a set of target parameters of the screen capture such as resolution, frame rate or simulcast configuration + * @param metadata the metadata that will be sent to the Membrane RTC Engine for media negotiation + * @param onEnd callback that will be invoked once the screen capture ends + * @return an instance of the screencast track + */ + fun createScreencastTrack( + mediaProjectionPermission: Intent, + videoParameters: VideoParameters, + metadata: Metadata, + onEnd: (() -> Unit)? = null + ): LocalScreencastTrack { + return client.createScreencastTrack(mediaProjectionPermission, videoParameters, metadata, onEnd) + } - /** - * Sets track encoding that server should send to the client library. - * - * The encoding will be sent whenever it is available. - * If chosen encoding is temporarily unavailable, some other encoding - * will be sent until chosen encoding becomes active again. - * - * @param trackId an id of a remote track - * @param encoding an encoding to receive - */ - fun setTargetTrackEncoding(trackId: String, encoding: TrackEncoding) { - client.setTargetTrackEncoding(trackId, encoding) - } + /** + * Removes an instance of local track from the client. + * + * @param trackId an id of a valid local track that has been created using the current client + * @return a boolean whether the track has been successfully removed or not + */ + fun removeTrack(trackId: String): Boolean { + return client.removeTrack(trackId) + } - /** - * Enables track encoding so that it will be sent to the server. - * - * @param trackId an id of a local track - * @param encoding an encoding that will be enabled - */ - fun enableTrackEncoding(trackId: String, encoding: TrackEncoding) { - client.enableTrackEncoding(trackId, encoding) - } + /** + * Sets track encoding that server should send to the client library. + * + * The encoding will be sent whenever it is available. + * If chosen encoding is temporarily unavailable, some other encoding + * will be sent until chosen encoding becomes active again. + * + * @param trackId an id of a remote track + * @param encoding an encoding to receive + */ + fun setTargetTrackEncoding( + trackId: String, + encoding: TrackEncoding + ) { + client.setTargetTrackEncoding(trackId, encoding) + } - /** - * Disables track encoding so that it will be no longer sent to the server. - * - * @param trackId and id of a local track - * @param encoding an encoding that will be disabled - */ - fun disableTrackEncoding(trackId: String, encoding: TrackEncoding) { - client.disableTrackEncoding(trackId, encoding) - } + /** + * Enables track encoding so that it will be sent to the server. + * + * @param trackId an id of a local track + * @param encoding an encoding that will be enabled + */ + fun enableTrackEncoding( + trackId: String, + encoding: TrackEncoding + ) { + client.enableTrackEncoding(trackId, encoding) + } - /** - * Updates the metadata for the current endpoint. - * @param endpointMetadata Data about this endpoint that other endpoints will receive upon connecting. - * - * If the metadata is different from what is already tracked in the room, the optional - * callback `onEndpointUpdated` will be triggered for other endpoints in the room. 
- */ - fun updateEndpointMetadata(endpointMetadata: Metadata) { - client.updateEndpointMetadata(endpointMetadata) - } + /** + * Disables track encoding so that it will no longer be sent to the server. + * + * @param trackId an id of a local track + * @param encoding an encoding that will be disabled + */ + fun disableTrackEncoding( + trackId: String, + encoding: TrackEncoding + ) { + client.disableTrackEncoding(trackId, encoding) + } - /** - * Updates the metadata for a specific track. - * @param trackId local track id of audio or video track. - * @param trackMetadata Data about this track that other endpoints will receive upon connecting. - * - * If the metadata is different from what is already tracked in the room, the optional - * callback `onTrackUpdated` will be triggered for other endpoints in the room. - */ - fun updateTrackMetadata(trackId: String, trackMetadata: Metadata) { - client.updateTrackMetadata(trackId, trackMetadata) - } + /** + * Updates the metadata for the current endpoint. + * @param endpointMetadata Data about this endpoint that other endpoints will receive upon connecting. + * + * If the metadata is different from what is already tracked in the room, the optional + * callback `onEndpointUpdated` will be triggered for other endpoints in the room. + */ + fun updateEndpointMetadata(endpointMetadata: Metadata) { + client.updateEndpointMetadata(endpointMetadata) + } - /** - * Updates maximum bandwidth for the track identified by trackId. - * This value directly translates to quality of the stream and, in case of video, to the amount of RTP packets being sent. - * In case trackId points at the simulcast track bandwidth is split between all of the variant streams proportionally to their resolution. - * @param trackId track id of a video track - * @param bandwidthLimit bandwidth in kbps - */ - fun setTrackBandwidth(trackId: String, bandwidthLimit: TrackBandwidthLimit.BandwidthLimit) { - client.setTrackBandwidth(trackId, bandwidthLimit) - } + /** + * Updates the metadata for a specific track. + * @param trackId local track id of audio or video track. + * @param trackMetadata Data about this track that other endpoints will receive upon connecting. + * + * If the metadata is different from what is already tracked in the room, the optional + * callback `onTrackUpdated` will be triggered for other endpoints in the room. + */ + fun updateTrackMetadata( + trackId: String, + trackMetadata: Metadata + ) { + client.updateTrackMetadata(trackId, trackMetadata) + } - /** - * Updates maximum bandwidth for the given simulcast encoding of the given track. - * @param trackId track id of a video track - * @param encoding rid of the encoding - * @param bandwidthLimit bandwidth in kbps - */ - fun setEncodingBandwidth( - trackId: String, - encoding: String, - bandwidthLimit: TrackBandwidthLimit.BandwidthLimit - ) { - client.setEncodingBandwidth(trackId, encoding, bandwidthLimit) - } + /** + * Updates maximum bandwidth for the track identified by trackId. + * This value directly translates to the quality of the stream and, in the case of video, to the number of RTP packets being sent. + * If trackId points at a simulcast track, the bandwidth is split between all of the variant streams proportionally to their resolution. 
+ * @param trackId track id of a video track + * @param bandwidthLimit bandwidth in kbps + */ + fun setTrackBandwidth( + trackId: String, + bandwidthLimit: TrackBandwidthLimit.BandwidthLimit + ) { + client.setTrackBandwidth(trackId, bandwidthLimit) + } - /** - * Changes severity level of debug logs - * @param severity enum value representing the logging severity - */ - fun changeWebRTCLoggingSeverity(severity: Logging.Severity) { - Logging.enableLogToDebugOutput(severity) - } + /** + * Updates maximum bandwidth for the given simulcast encoding of the given track. + * @param trackId track id of a video track + * @param encoding rid of the encoding + * @param bandwidthLimit bandwidth in kbps + */ + fun setEncodingBandwidth( + trackId: String, + encoding: String, + bandwidthLimit: TrackBandwidthLimit.BandwidthLimit + ) { + client.setEncodingBandwidth(trackId, encoding, bandwidthLimit) + } - /** - * Returns current connection stats - * @return a map containing statistics - */ - fun getStats(): Map { - return client.getStats() - } + /** + * Changes severity level of debug logs + * @param severity enum value representing the logging severity + */ + fun changeWebRTCLoggingSeverity(severity: Logging.Severity) { + Logging.enableLogToDebugOutput(severity) + } - companion object { /** - * Creates an instance of MembraneRTC client. - * - * @param appContext the context of the current application - * @param listener a listener that will receive all notifications emitted by the MembraneRTC - * @param options a set of options defining parameters such as encoder parameters - * @return an instance of the client in connecting state + * Returns current connection stats + * @return a map containing statistics */ - fun create( - appContext: Context, - listener: MembraneRTCListener, - options: CreateOptions = CreateOptions() - ): MembraneRTC { - val ctx = appContext.applicationContext + fun getStats(): Map { + return client.getStats() + } + + companion object { + /** + * Creates an instance of MembraneRTC client. 
+ * + * @param appContext the context of the current application + * @param listener a listener that will receive all notifications emitted by the MembraneRTC + * @param options a set of options defining parameters such as encoder parameters + * @return an instance of the client in connecting state + */ + fun create( + appContext: Context, + listener: MembraneRTCListener, + options: CreateOptions = CreateOptions() + ): MembraneRTC { + val ctx = appContext.applicationContext - val component = DaggerMembraneRTCComponent - .factory() - .create(ctx) + val component = + DaggerMembraneRTCComponent + .factory() + .create(ctx) - val client = component - .membraneRTCFactory() - .create(options, listener, Dispatchers.Default) + val client = + component + .membraneRTCFactory() + .create(options, listener, Dispatchers.Default) - return MembraneRTC(client) + return MembraneRTC(client) + } } } -} diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCError.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCError.kt index fcc35fd..799dfc8 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCError.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCError.kt @@ -2,7 +2,9 @@ package org.membraneframework.rtc sealed class MembraneRTCError : Exception() { data class RTC(val reason: String) : MembraneRTCError() + data class Transport(val reason: String) : MembraneRTCError() + data class Unknown(val reason: String) : MembraneRTCError() override fun toString(): String { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCListener.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCListener.kt index ff148d6..ec7b1e6 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCListener.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/MembraneRTCListener.kt @@ -10,7 +10,10 @@ interface MembraneRTCListener { fun onSendMediaEvent(event: SerializedMediaEvent) // Callback invoked when the client has been approved to participate in media exchange. 
- fun onConnected(endpointID: String, otherEndpoints: List) + fun onConnected( + endpointID: String, + otherEndpoints: List + ) // Called when endpoint of this MembraneRTC instance was removed fun onDisconnected() diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionFactoryWrapper.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionFactoryWrapper.kt index 678e47f..8f2080c 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionFactoryWrapper.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionFactoryWrapper.kt @@ -9,39 +9,39 @@ import org.webrtc.* import org.webrtc.audio.AudioDeviceModule internal class PeerConnectionFactoryWrapper -@AssistedInject constructor( - @Assisted private val createOptions: CreateOptions, - audioDeviceModule: AudioDeviceModule, - eglBase: EglBase, - appContext: Context -) { - @AssistedFactory - interface PeerConnectionFactoryWrapperFactory { - fun create( - createOptions: CreateOptions - ): PeerConnectionFactoryWrapper - } + @AssistedInject + constructor( + @Assisted private val createOptions: CreateOptions, + audioDeviceModule: AudioDeviceModule, + eglBase: EglBase, + appContext: Context + ) { + @AssistedFactory + interface PeerConnectionFactoryWrapperFactory { + fun create(createOptions: CreateOptions): PeerConnectionFactoryWrapper + } - val peerConnectionFactory: PeerConnectionFactory + val peerConnectionFactory: PeerConnectionFactory - init { - PeerConnectionFactory.initialize( - PeerConnectionFactory.InitializationOptions.builder(appContext).createInitializationOptions() - ) + init { + PeerConnectionFactory.initialize( + PeerConnectionFactory.InitializationOptions.builder(appContext).createInitializationOptions() + ) - peerConnectionFactory = - PeerConnectionFactory.builder().setAudioDeviceModule(audioDeviceModule).setVideoEncoderFactory( - SimulcastVideoEncoderFactoryWrapper( - eglBase.eglBaseContext, - createOptions.encoderOptions - ) - ).setVideoDecoderFactory(DefaultVideoDecoderFactory(eglBase.eglBaseContext)).createPeerConnectionFactory() - } + peerConnectionFactory = + PeerConnectionFactory.builder().setAudioDeviceModule(audioDeviceModule).setVideoEncoderFactory( + SimulcastVideoEncoderFactoryWrapper( + eglBase.eglBaseContext, + createOptions.encoderOptions + ) + ).setVideoDecoderFactory(DefaultVideoDecoderFactory(eglBase.eglBaseContext)) + .createPeerConnectionFactory() + } - fun createPeerConnection( - rtcConfig: PeerConnection.RTCConfiguration, - observer: PeerConnection.Observer - ): PeerConnection? { - return peerConnectionFactory.createPeerConnection(rtcConfig, observer) + fun createPeerConnection( + rtcConfig: PeerConnection.RTCConfiguration, + observer: PeerConnection.Observer + ): PeerConnection? 
{ + return peerConnectionFactory.createPeerConnection(rtcConfig, observer) + } } -} diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionListener.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionListener.kt index fc5ada7..a0ae770 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionListener.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionListener.kt @@ -4,6 +4,10 @@ import org.webrtc.IceCandidate import org.webrtc.MediaStreamTrack internal interface PeerConnectionListener { - fun onAddTrack(trackId: String, track: MediaStreamTrack) + fun onAddTrack( + trackId: String, + track: MediaStreamTrack + ) + fun onLocalIceCandidate(candidate: IceCandidate) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionManager.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionManager.kt index a6970d2..8474510 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionManager.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/PeerConnectionManager.kt @@ -25,510 +25,551 @@ import java.util.* import kotlin.math.pow internal class PeerConnectionManager -@AssistedInject constructor( - @Assisted private val peerConnectionListener: PeerConnectionListener, - @Assisted private val peerConnectionFactory: PeerConnectionFactoryWrapper -) : PeerConnection.Observer { - - @AssistedFactory - interface PeerConnectionManagerFactory { - fun create( - listener: PeerConnectionListener, - peerConnectionFactory: PeerConnectionFactoryWrapper - ): PeerConnectionManager - } - - private var peerConnection: PeerConnection? = null - private val peerConnectionMutex = Mutex() - private val peerConnectionStats = mutableMapOf() - - private var iceServers: List? = null - private var config: PeerConnection.RTCConfiguration? = null - private var queuedRemoteCandidates: MutableList? = null - private val qrcMutex = Mutex() - private var midToTrackId: Map = HashMap() - - private val coroutineScope: CoroutineScope = - ClosableCoroutineScope(SupervisorJob()) - - private var streamIds: List = listOf(UUID.randomUUID().toString()) - - private fun getSendEncodingsFromConfig(simulcastConfig: SimulcastConfig): List { - val sendEncodings = Constants.simulcastEncodings() - simulcastConfig.activeEncodings.forEach { - sendEncodings[it.ordinal].active = true + @AssistedInject + constructor( + @Assisted private val peerConnectionListener: PeerConnectionListener, + @Assisted private val peerConnectionFactory: PeerConnectionFactoryWrapper + ) : PeerConnection.Observer { + @AssistedFactory + interface PeerConnectionManagerFactory { + fun create( + listener: PeerConnectionListener, + peerConnectionFactory: PeerConnectionFactoryWrapper + ): PeerConnectionManager + } + + private var peerConnection: PeerConnection? = null + private val peerConnectionMutex = Mutex() + private val peerConnectionStats = mutableMapOf() + + private var iceServers: List? = null + private var config: PeerConnection.RTCConfiguration? = null + private var queuedRemoteCandidates: MutableList? 
= null + private val qrcMutex = Mutex() + private var midToTrackId: Map = HashMap() + + private val coroutineScope: CoroutineScope = + ClosableCoroutineScope(SupervisorJob()) + + private var streamIds: List = listOf(UUID.randomUUID().toString()) + + private fun getSendEncodingsFromConfig(simulcastConfig: SimulcastConfig): List { + val sendEncodings = Constants.simulcastEncodings() + simulcastConfig.activeEncodings.forEach { + sendEncodings[it.ordinal].active = true + } + return sendEncodings } - return sendEncodings - } - suspend fun addTrack(track: LocalTrack) { - addTrack(track, streamIds) - } + suspend fun addTrack(track: LocalTrack) { + addTrack(track, streamIds) + } - private suspend fun addTrack(track: LocalTrack, streamIds: List) { - val videoParameters = - (track as? LocalVideoTrack)?.videoParameters ?: (track as? LocalScreencastTrack)?.videoParameters + private suspend fun addTrack( + track: LocalTrack, + streamIds: List + ) { + val videoParameters = + (track as? LocalVideoTrack)?.videoParameters ?: (track as? LocalScreencastTrack)?.videoParameters - val simulcastConfig = videoParameters?.simulcastConfig - val sendEncodings = - if (track.rtcTrack().kind() == "video" && simulcastConfig != null && simulcastConfig.enabled) { - getSendEncodingsFromConfig(simulcastConfig) - } else { - listOf(RtpParameters.Encoding(null, true, null)) - } + val simulcastConfig = videoParameters?.simulcastConfig + val sendEncodings = + if (track.rtcTrack().kind() == "video" && simulcastConfig != null && simulcastConfig.enabled) { + getSendEncodingsFromConfig(simulcastConfig) + } else { + listOf(RtpParameters.Encoding(null, true, null)) + } - peerConnectionMutex.withLock { - val pc = peerConnection ?: run { - Timber.e("addTrack: Peer connection not yet established") - return - } + peerConnectionMutex.withLock { + val pc = + peerConnection ?: run { + Timber.e("addTrack: Peer connection not yet established") + return + } + + if (videoParameters?.maxBitrate != null) { + applyBitrate(sendEncodings, videoParameters.maxBitrate) + } - if (videoParameters?.maxBitrate != null) { - applyBitrate(sendEncodings, videoParameters.maxBitrate) + pc.addTransceiver( + track.rtcTrack(), + RtpTransceiver.RtpTransceiverDirection.SEND_ONLY, + streamIds, + sendEncodings + ) + pc.enforceSendOnlyDirection() } - - pc.addTransceiver( - track.rtcTrack(), - RtpTransceiver.RtpTransceiverDirection.SEND_ONLY, - streamIds, - sendEncodings - ) - pc.enforceSendOnlyDirection() } - } - private fun applyBitrate(encodings: List, maxBitrate: TrackBandwidthLimit) { - when (maxBitrate) { - is TrackBandwidthLimit.BandwidthLimit -> splitBitrate(encodings, maxBitrate) - is TrackBandwidthLimit.SimulcastBandwidthLimit -> encodings.forEach { - val encodingLimit = maxBitrate.limit[it.rid]?.limit ?: 0 - it.maxBitrateBps = if (encodingLimit == 0) null else encodingLimit * 1024 + private fun applyBitrate( + encodings: List, + maxBitrate: TrackBandwidthLimit + ) { + when (maxBitrate) { + is TrackBandwidthLimit.BandwidthLimit -> splitBitrate(encodings, maxBitrate) + is TrackBandwidthLimit.SimulcastBandwidthLimit -> + encodings.forEach { + val encodingLimit = maxBitrate.limit[it.rid]?.limit ?: 0 + it.maxBitrateBps = if (encodingLimit == 0) null else encodingLimit * 1024 + } } } - } - - private fun splitBitrate(encodings: List, maxBitrate: TrackBandwidthLimit.BandwidthLimit) { - if (encodings.isEmpty()) { - Timber.e("splitBitrate: Attempted to limit bandwidth of the track that doesn't have any encodings") - return - } - if (maxBitrate.limit == 0) { - 
encodings.forEach { it.maxBitrateBps = null } - return - } - - val k0 = encodings.minByOrNull { it.scaleResolutionDownBy ?: 1.0 } - - val bitrateParts = encodings.sumOf { - ((k0?.scaleResolutionDownBy ?: 1.0) / (it.scaleResolutionDownBy ?: 1.0)).pow( - 2 - ) - } - - val x = maxBitrate.limit / bitrateParts - - encodings.forEach { - it.maxBitrateBps = - (x * ((k0?.scaleResolutionDownBy ?: 1.0) / (it.scaleResolutionDownBy ?: 1.0)).pow(2) * 1024).toInt() - } - } - suspend fun setTrackBandwidth(trackId: String, bandwidthLimit: TrackBandwidthLimit.BandwidthLimit) { - peerConnectionMutex.withLock { - val pc = peerConnection ?: run { - Timber.e("setTrackBandwidth: Peer connection not yet established") + private fun splitBitrate( + encodings: List, + maxBitrate: TrackBandwidthLimit.BandwidthLimit + ) { + if (encodings.isEmpty()) { + Timber.e("splitBitrate: Attempted to limit bandwidth of the track that doesn't have any encodings") return } - val sender = pc.senders.find { it.track()?.id() == trackId } ?: run { - Timber.e("setTrackBandwidth: Invalid trackId: track sender not found") + if (maxBitrate.limit == 0) { + encodings.forEach { it.maxBitrateBps = null } return } - val params = sender.parameters - applyBitrate(params.getEncodings(), bandwidthLimit) + val k0 = encodings.minByOrNull { it.scaleResolutionDownBy ?: 1.0 } - sender.parameters = params - } - } + val bitrateParts = + encodings.sumOf { + ((k0?.scaleResolutionDownBy ?: 1.0) / (it.scaleResolutionDownBy ?: 1.0)).pow( + 2 + ) + } - suspend fun setEncodingBandwidth( - trackId: String, - encoding: String, - bandwidthLimit: TrackBandwidthLimit.BandwidthLimit - ) { - peerConnectionMutex.withLock { - val pc = peerConnection ?: run { - Timber.e("setEncodingBandwidth: Peer connection not yet established") - return - } - val sender = pc.senders.find { it.track()?.id() == trackId } ?: run { - Timber.e("setEncodingBandwidth: Invalid trackId: track sender not found") - return - } + val x = maxBitrate.limit / bitrateParts - val params = sender.parameters - val encodingParameters = params.encodings.find { it.rid == encoding } ?: run { - Timber.e("setEncodingBandwidth: Invalid encoding: encoding not found") - return + encodings.forEach { + it.maxBitrateBps = + (x * ((k0?.scaleResolutionDownBy ?: 1.0) / (it.scaleResolutionDownBy ?: 1.0)).pow(2) * 1024).toInt() } + } - encodingParameters.maxBitrateBps = bandwidthLimit.limit * 1024 + suspend fun setTrackBandwidth( + trackId: String, + bandwidthLimit: TrackBandwidthLimit.BandwidthLimit + ) { + peerConnectionMutex.withLock { + val pc = + peerConnection ?: run { + Timber.e("setTrackBandwidth: Peer connection not yet established") + return + } + val sender = + pc.senders.find { it.track()?.id() == trackId } ?: run { + Timber.e("setTrackBandwidth: Invalid trackId: track sender not found") + return + } + val params = sender.parameters + + applyBitrate(params.getEncodings(), bandwidthLimit) + + sender.parameters = params + } + } - sender.parameters = params + suspend fun setEncodingBandwidth( + trackId: String, + encoding: String, + bandwidthLimit: TrackBandwidthLimit.BandwidthLimit + ) { + peerConnectionMutex.withLock { + val pc = + peerConnection ?: run { + Timber.e("setEncodingBandwidth: Peer connection not yet established") + return + } + val sender = + pc.senders.find { it.track()?.id() == trackId } ?: run { + Timber.e("setEncodingBandwidth: Invalid trackId: track sender not found") + return + } + + val params = sender.parameters + val encodingParameters = + params.encodings.find { it.rid == encoding } ?: run 
{ + Timber.e("setEncodingBandwidth: Invalid encoding: encoding not found") + return + } + + encodingParameters.maxBitrateBps = bandwidthLimit.limit * 1024 + + sender.parameters = params + } } - } - suspend fun removeTrack(trackId: String): Boolean { - peerConnectionMutex.withLock { - val pc = peerConnection ?: run { - Timber.e("removeTrack: Peer connection not yet established") + suspend fun removeTrack(trackId: String): Boolean { + peerConnectionMutex.withLock { + val pc = + peerConnection ?: run { + Timber.e("removeTrack: Peer connection not yet established") + return false + } + pc.transceivers.find { it.sender.track()?.id() == trackId }?.sender?.let { + pc.removeTrack(it) + return true + } return false } - pc.transceivers.find { it.sender.track()?.id() == trackId }?.sender?.let { - pc.removeTrack(it) - return true - } - return false } - } - private suspend fun setupPeerConnection(localTracks: List) { - if (peerConnection != null) { - Timber.e("setupPeerConnection: Peer connection already established!") - return - } + private suspend fun setupPeerConnection(localTracks: List) { + if (peerConnection != null) { + Timber.e("setupPeerConnection: Peer connection already established!") + return + } - assert(config != null) - val config = this.config!! + assert(config != null) + val config = this.config!! - config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN - config.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY - config.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.ALL - config.disableIpv6 = true - config.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED + config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN + config.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY + config.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.ALL + config.disableIpv6 = true + config.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED - val pc = peerConnectionFactory.createPeerConnection(config, this) - ?: throw IllegalStateException("Failed to create a peerConnection") + val pc = + peerConnectionFactory.createPeerConnection(config, this) + ?: throw IllegalStateException("Failed to create a peerConnection") - peerConnectionMutex.withLock { - this@PeerConnectionManager.peerConnection = pc - } + peerConnectionMutex.withLock { + this@PeerConnectionManager.peerConnection = pc + } - localTracks.forEach { - addTrack(it, streamIds) - } + localTracks.forEach { + addTrack(it, streamIds) + } - peerConnectionMutex.withLock { - pc.enforceSendOnlyDirection() + peerConnectionMutex.withLock { + pc.enforceSendOnlyDirection() + } } - } - private suspend fun drainCandidates() { - qrcMutex.withLock { - this.queuedRemoteCandidates?.let { - for (candidate in it) { - this.peerConnection?.addIceCandidate(candidate) + private suspend fun drainCandidates() { + qrcMutex.withLock { + this.queuedRemoteCandidates?.let { + for (candidate in it) { + this.peerConnection?.addIceCandidate(candidate) + } + this.queuedRemoteCandidates = null } - this.queuedRemoteCandidates = null } } - } - private fun prepareIceServers(integratedTurnServers: List) { - if (config != null || iceServers != null) { - Timber.e("prepareIceServers: Config or ice servers are already initialized, skipping the preparation") - return - } + private fun prepareIceServers(integratedTurnServers: List) { + if (config != null || iceServers != null) { + Timber.e("prepareIceServers: Config or ice servers are already initialized, 
skipping the preparation") + return + } - this.iceServers = integratedTurnServers.map { - val url = listOf( - "turn", - ":", - it.serverAddr, - ":", - it.serverPort.toString(), - "?transport=", - it.transport - ).joinToString("") + this.iceServers = + integratedTurnServers.map { + val url = + listOf( + "turn", + ":", + it.serverAddr, + ":", + it.serverPort.toString(), + "?transport=", + it.transport + ).joinToString("") + + PeerConnection.IceServer.builder(url) + .setUsername(it.username) + .setPassword(it.password) + .createIceServer() + } - PeerConnection.IceServer.builder(url).setUsername(it.username).setPassword(it.password).createIceServer() + val config = PeerConnection.RTCConfiguration(iceServers) + config.iceTransportsType = PeerConnection.IceTransportsType.RELAY + this.config = config } - val config = PeerConnection.RTCConfiguration(iceServers) - config.iceTransportsType = PeerConnection.IceTransportsType.RELAY - this.config = config - } - - private fun addNecessaryTransceivers(tracksTypes: Map) { - val pc = peerConnection ?: return + private fun addNecessaryTransceivers(tracksTypes: Map) { + val pc = peerConnection ?: return - val necessaryAudio = tracksTypes["audio"] ?: 0 - val necessaryVideo = tracksTypes["video"] ?: 0 + val necessaryAudio = tracksTypes["audio"] ?: 0 + val necessaryVideo = tracksTypes["video"] ?: 0 - var lackingAudio = necessaryAudio - var lackingVideo = necessaryVideo + var lackingAudio = necessaryAudio + var lackingVideo = necessaryVideo - pc.transceivers.filter { - it.direction == RtpTransceiver.RtpTransceiverDirection.RECV_ONLY - }.forEach { - val track = it.receiver.track() ?: return@forEach + pc.transceivers.filter { + it.direction == RtpTransceiver.RtpTransceiverDirection.RECV_ONLY + }.forEach { + val track = it.receiver.track() ?: return@forEach - when (track.kind()) { - "audio" -> lackingAudio -= 1 - "video" -> lackingVideo -= 1 + when (track.kind()) { + "audio" -> lackingAudio -= 1 + "video" -> lackingVideo -= 1 + } } - } - Timber.d("peerConnection adding $lackingAudio audio and $lackingVideo video lacking transceivers") + Timber.d("peerConnection adding $lackingAudio audio and $lackingVideo video lacking transceivers") - repeat(lackingAudio) { - pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO).direction = - RtpTransceiver.RtpTransceiverDirection.RECV_ONLY - } + repeat(lackingAudio) { + pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO).direction = + RtpTransceiver.RtpTransceiverDirection.RECV_ONLY + } - repeat(lackingVideo) { - pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO).direction = - RtpTransceiver.RtpTransceiverDirection.RECV_ONLY + repeat(lackingVideo) { + pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO).direction = + RtpTransceiver.RtpTransceiverDirection.RECV_ONLY + } } - } - suspend fun onSdpAnswer( - sdp: String, - midToTrackId: Map - ) { - peerConnectionMutex.withLock { - val pc = peerConnection ?: return + suspend fun onSdpAnswer( + sdp: String, + midToTrackId: Map + ) { + peerConnectionMutex.withLock { + val pc = peerConnection ?: return - val answer = SessionDescription( - SessionDescription.Type.ANSWER, - sdp - ) + val answer = + SessionDescription( + SessionDescription.Type.ANSWER, + sdp + ) - this@PeerConnectionManager.midToTrackId = midToTrackId + this@PeerConnectionManager.midToTrackId = midToTrackId - pc.setRemoteDescription(answer).onSuccess { - drainCandidates() + pc.setRemoteDescription(answer).onSuccess { + drainCandidates() + } } } - } - - private fun 
midToTrackIdMapping(localTracks: List): Map { - val pc = peerConnection ?: return emptyMap() - - val mapping = mutableMapOf() - pc.transceivers.forEach { - val trackId = it.sender.track()?.id() ?: return@forEach + private fun midToTrackIdMapping(localTracks: List): Map { + val pc = peerConnection ?: return emptyMap() - if (!localTracks.map { track -> track.id() }.contains(trackId)) return@forEach + val mapping = mutableMapOf() - mapping[it.mid] = trackId - } + pc.transceivers.forEach { + val trackId = it.sender.track()?.id() ?: return@forEach - return mapping - } + if (!localTracks.map { track -> track.id() }.contains(trackId)) return@forEach - data class SdpOffer( - val description: String, - val midToTrackIdMapping: Map - ) + mapping[it.mid] = trackId + } - suspend fun getSdpOffer( - integratedTurnServers: List, - tracksTypes: Map, - localTracks: List - ): SdpOffer { - qrcMutex.withLock { - this@PeerConnectionManager.queuedRemoteCandidates = mutableListOf() + return mapping } - prepareIceServers(integratedTurnServers) - var needsRestart = true - if (peerConnection == null) { - setupPeerConnection(localTracks) - needsRestart = false - } - peerConnectionMutex.withLock { - val pc = peerConnection!! + data class SdpOffer( + val description: String, + val midToTrackIdMapping: Map + ) - if (needsRestart) { - pc.restartIce() + suspend fun getSdpOffer( + integratedTurnServers: List, + tracksTypes: Map, + localTracks: List + ): SdpOffer { + qrcMutex.withLock { + this@PeerConnectionManager.queuedRemoteCandidates = mutableListOf() } + prepareIceServers(integratedTurnServers) - addNecessaryTransceivers(tracksTypes) + var needsRestart = true + if (peerConnection == null) { + setupPeerConnection(localTracks) + needsRestart = false + } + peerConnectionMutex.withLock { + val pc = peerConnection!! 
- pc.transceivers.forEach { - if (it.direction == RtpTransceiver.RtpTransceiverDirection.SEND_RECV) { - it.direction = RtpTransceiver.RtpTransceiverDirection.SEND_ONLY + if (needsRestart) { + pc.restartIce() } - } - val constraints = MediaConstraints() + addNecessaryTransceivers(tracksTypes) - Timber.i("Creating offer") - val offer = pc.createOffer(constraints).getOrThrow() + pc.transceivers.forEach { + if (it.direction == RtpTransceiver.RtpTransceiverDirection.SEND_RECV) { + it.direction = RtpTransceiver.RtpTransceiverDirection.SEND_ONLY + } + } - Timber.i("Setting local description") - pc.setLocalDescription(offer).getOrThrow() + val constraints = MediaConstraints() - return SdpOffer(offer.description, midToTrackIdMapping(localTracks)) - } - } + Timber.i("Creating offer") + val offer = pc.createOffer(constraints).getOrThrow() - suspend fun setTrackEncoding(trackId: String, trackEncoding: TrackEncoding, enabled: Boolean) { - peerConnectionMutex.withLock { - val sender = peerConnection?.senders?.find { it -> it.track()?.id() == trackId } ?: run { - Timber.e("setTrackEncoding: Invalid trackId $trackId, no track sender found") - return + Timber.i("Setting local description") + pc.setLocalDescription(offer).getOrThrow() + + return SdpOffer(offer.description, midToTrackIdMapping(localTracks)) } - val params = sender.parameters - val encoding = params?.encodings?.find { it.rid == trackEncoding.rid } ?: run { - Timber.e( - "setTrackEncoding: Invalid encoding $trackEncoding," + - "no such encoding found in peer connection" - ) - return + } + + suspend fun setTrackEncoding( + trackId: String, + trackEncoding: TrackEncoding, + enabled: Boolean + ) { + peerConnectionMutex.withLock { + val sender = + peerConnection?.senders?.find { it -> it.track()?.id() == trackId } ?: run { + Timber.e("setTrackEncoding: Invalid trackId $trackId, no track sender found") + return + } + val params = sender.parameters + val encoding = + params?.encodings?.find { it.rid == trackEncoding.rid } ?: run { + Timber.e( + "setTrackEncoding: Invalid encoding $trackEncoding," + + "no such encoding found in peer connection" + ) + return + } + encoding.active = enabled + sender.parameters = params } - encoding.active = enabled - sender.parameters = params } - } - suspend fun onRemoteCandidate(iceCandidate: IceCandidate) { - peerConnectionMutex.withLock { - val pc = peerConnection ?: return - qrcMutex.withLock { - if (this@PeerConnectionManager.queuedRemoteCandidates == null) { - pc.addIceCandidate(iceCandidate) - } else { - this@PeerConnectionManager.queuedRemoteCandidates!!.add(iceCandidate) + suspend fun onRemoteCandidate(iceCandidate: IceCandidate) { + peerConnectionMutex.withLock { + val pc = peerConnection ?: return + qrcMutex.withLock { + if (this@PeerConnectionManager.queuedRemoteCandidates == null) { + pc.addIceCandidate(iceCandidate) + } else { + this@PeerConnectionManager.queuedRemoteCandidates!!.add(iceCandidate) + } } } } - } - suspend fun close() { - peerConnectionMutex.withLock { - peerConnection?.close() + suspend fun close() { + peerConnectionMutex.withLock { + peerConnection?.close() + } } - } - override fun onSignalingChange(state: PeerConnection.SignalingState?) { - Timber.d("Changed signalling state to $state") - } + override fun onSignalingChange(state: PeerConnection.SignalingState?) { + Timber.d("Changed signalling state to $state") + } - override fun onIceConnectionChange(state: PeerConnection.IceConnectionState?) 
{ - Timber.d("Changed ice connection state to $state") - } + override fun onIceConnectionChange(state: PeerConnection.IceConnectionState?) { + Timber.d("Changed ice connection state to $state") + } - override fun onIceConnectionReceivingChange(receiving: Boolean) { - Timber.d("Changed ice connection receiving status to: $receiving") - } + override fun onIceConnectionReceivingChange(receiving: Boolean) { + Timber.d("Changed ice connection receiving status to: $receiving") + } - override fun onIceGatheringChange(state: PeerConnection.IceGatheringState?) { - Timber.d("Change ice gathering state to $state") - } + override fun onIceGatheringChange(state: PeerConnection.IceGatheringState?) { + Timber.d("Change ice gathering state to $state") + } - override fun onIceCandidate(candidate: IceCandidate?) { - if (candidate != null) { - peerConnectionListener.onLocalIceCandidate(candidate) + override fun onIceCandidate(candidate: IceCandidate?) { + if (candidate != null) { + peerConnectionListener.onLocalIceCandidate(candidate) + } } - } - override fun onIceCandidatesRemoved(candidates: Array?) { - Timber.d("Removed ice candidates from connection") - } + override fun onIceCandidatesRemoved(candidates: Array?) { + Timber.d("Removed ice candidates from connection") + } - override fun onAddStream(stream: MediaStream?) { - Timber.d("Added media stream") - } + override fun onAddStream(stream: MediaStream?) { + Timber.d("Added media stream") + } - override fun onRemoveStream(stream: MediaStream?) { - Timber.d("Removed media stream") - } + override fun onRemoveStream(stream: MediaStream?) { + Timber.d("Removed media stream") + } - override fun onAddTrack(receiver: RtpReceiver?, mediaStreams: Array?) { - var trackId: String? = null - coroutineScope.launch { - peerConnectionMutex.withLock { - val pc = peerConnection ?: return@launch + override fun onAddTrack( + receiver: RtpReceiver?, + mediaStreams: Array? + ) { + var trackId: String? = null + coroutineScope.launch { + peerConnectionMutex.withLock { + val pc = peerConnection ?: return@launch - val transceiver = pc.transceivers.find { - it.receiver.id() == receiver?.id() - } ?: return@launch + val transceiver = + pc.transceivers.find { + it.receiver.id() == receiver?.id() + } ?: return@launch - val mid = transceiver.mid + val mid = transceiver.mid - trackId = midToTrackId[mid] ?: run { - Timber.e("onAddTrack: Track with mid=$mid not found") - return@launch + trackId = midToTrackId[mid] ?: run { + Timber.e("onAddTrack: Track with mid=$mid not found") + return@launch + } } + peerConnectionListener.onAddTrack(trackId!!, receiver!!.track()!!) } - peerConnectionListener.onAddTrack(trackId!!, receiver!!.track()!!) } - } - - override fun onRemoveTrack(receiver: RtpReceiver?) { - super.onRemoveTrack(receiver) - } - - override fun onDataChannel(dataChannel: DataChannel?) { - Timber.d("New data channel") - } - - override fun onRenegotiationNeeded() { - Timber.d("Renegotiation needed") - } - - fun getStats(): Map { - peerConnection?.getStats { rtcStatsReport -> extractRelevantStats(rtcStatsReport) } - return peerConnectionStats.toMap() - } - - private fun extractRelevantStats(rp: RTCStatsReport) { - rp.statsMap.values.forEach { - if (it.type == "outbound-rtp") { - val durations = it.members["qualityLimitationDurations"] as? Map<*, *> - val qualityLimitation = QualityLimitationDurations( - durations?.get("bandwidth") as? Double ?: 0.0, - durations?.get("cpu") as? Double ?: 0.0, - durations?.get("none") as? Double ?: 0.0, - durations?.get("other") as? 
Double ?: 0.0 - ) - - val tmp = RTCOutboundStats( - it.members["kind"] as? String, - it.members["rid"] as? String, - it.members["bytesSent"] as? BigInteger, - it.members["targetBitrate"] as? Double, - it.members["packetsSent"] as? Long, - it.members["framesEncoded"] as? Long, - it.members["framesPerSecond"] as? Double, - it.members["frameWidth"] as? Long, - it.members["frameHeight"] as? Long, - qualityLimitation - ) - peerConnectionStats[it.id as String] = tmp - } else if (it.type == "inbound-rtp") { - val tmp = RTCInboundStats( - it.members["kind"] as? String, - it.members["jitter"] as? Double, - it.members["packetsLost"] as? Int, - it.members["packetsReceived"] as? Long, - it.members["bytesReceived"] as? BigInteger, - it.members["framesReceived"] as? Int, - it.members["frameWidth"] as? Long, - it.members["frameHeight"] as? Long, - it.members["framesPerSecond"] as? Double, - it.members["framesDropped"] as? Long - ) - - peerConnectionStats[it.id as String] = tmp + override fun onRemoveTrack(receiver: RtpReceiver?) { + super.onRemoveTrack(receiver) + } + + override fun onDataChannel(dataChannel: DataChannel?) { + Timber.d("New data channel") + } + + override fun onRenegotiationNeeded() { + Timber.d("Renegotiation needed") + } + + fun getStats(): Map { + peerConnection?.getStats { rtcStatsReport -> extractRelevantStats(rtcStatsReport) } + return peerConnectionStats.toMap() + } + + private fun extractRelevantStats(rp: RTCStatsReport) { + rp.statsMap.values.forEach { + if (it.type == "outbound-rtp") { + val durations = it.members["qualityLimitationDurations"] as? Map<*, *> + val qualityLimitation = + QualityLimitationDurations( + durations?.get("bandwidth") as? Double ?: 0.0, + durations?.get("cpu") as? Double ?: 0.0, + durations?.get("none") as? Double ?: 0.0, + durations?.get("other") as? Double ?: 0.0 + ) + + val tmp = + RTCOutboundStats( + it.members["kind"] as? String, + it.members["rid"] as? String, + it.members["bytesSent"] as? BigInteger, + it.members["targetBitrate"] as? Double, + it.members["packetsSent"] as? Long, + it.members["framesEncoded"] as? Long, + it.members["framesPerSecond"] as? Double, + it.members["frameWidth"] as? Long, + it.members["frameHeight"] as? Long, + qualityLimitation + ) + + peerConnectionStats[it.id as String] = tmp + } else if (it.type == "inbound-rtp") { + val tmp = + RTCInboundStats( + it.members["kind"] as? String, + it.members["jitter"] as? Double, + it.members["packetsLost"] as? Int, + it.members["packetsReceived"] as? Long, + it.members["bytesReceived"] as? BigInteger, + it.members["framesReceived"] as? Int, + it.members["frameWidth"] as? Long, + it.members["frameHeight"] as? Long, + it.members["framesPerSecond"] as? Double, + it.members["framesDropped"] as? Long + ) + + peerConnectionStats[it.id as String] = tmp + } } } } -} /** * Enforces `SEND_ONLY` direction in case of `SEND_RECV` transceivers. 
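To make the simulcast bandwidth-splitting rule above easier to follow, here is a minimal, self-contained Kotlin sketch (illustrative only, not part of the patch). It assumes a simplified stand-in for org.webrtc.RtpParameters.Encoding and mirrors splitBitrate's weighting by (k0 / scaleResolutionDownBy)^2, with the limit expressed in kbps as in TrackBandwidthLimit.BandwidthLimit.

    import kotlin.math.pow

    // Stand-in for org.webrtc.RtpParameters.Encoding - illustrative only, not the real WebRTC type.
    data class Encoding(val rid: String, val scaleResolutionDownBy: Double, var maxBitrateBps: Int? = null)

    // Splits a total limit (kbps) across simulcast encodings proportionally to their pixel count,
    // i.e. weighted by (smallestScale / scaleResolutionDownBy)^2, like splitBitrate in the diff above.
    fun splitBitrateSketch(encodings: List<Encoding>, limitKbps: Int) {
        if (encodings.isEmpty()) return
        if (limitKbps == 0) {
            encodings.forEach { it.maxBitrateBps = null } // a limit of 0 means "no cap"
            return
        }
        val k0 = encodings.minOf { it.scaleResolutionDownBy }
        val parts = encodings.sumOf { (k0 / it.scaleResolutionDownBy).pow(2) }
        val unit = limitKbps / parts
        encodings.forEach { it.maxBitrateBps = (unit * (k0 / it.scaleResolutionDownBy).pow(2) * 1024).toInt() }
    }

    fun main() {
        // "h" is the original resolution, "m" is scaled down by 2, "l" by 4 (see TrackEncoding).
        val encodings = listOf(Encoding("h", 1.0), Encoding("m", 2.0), Encoding("l", 4.0))
        splitBitrateSketch(encodings, 1500)
        encodings.forEach { println("${it.rid}: ${it.maxBitrateBps} bps") } // split 16 : 4 : 1
    }

With variants scaled down by 1, 2 and 4, a limit is therefore split 16 : 4 : 1 across "h", "m" and "l", and passing 0 removes the per-encoding caps.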
diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineCommunication.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineCommunication.kt index 10f18d7..fefa00b 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineCommunication.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineCommunication.kt @@ -12,119 +12,130 @@ import timber.log.Timber import kotlin.math.roundToLong internal class RTCEngineCommunication -@AssistedInject -constructor( - @Assisted - private val engineListener: RTCEngineListener -) { - @AssistedFactory - interface RTCEngineCommunicationFactory { - fun create( - listener: RTCEngineListener - ): RTCEngineCommunication - } + @AssistedInject + constructor( + @Assisted + private val engineListener: RTCEngineListener + ) { + @AssistedFactory + interface RTCEngineCommunicationFactory { + fun create(listener: RTCEngineListener): RTCEngineCommunication + } - fun connect(endpointMetadata: Metadata) { - sendEvent(Connect(endpointMetadata)) - } + fun connect(endpointMetadata: Metadata) { + sendEvent(Connect(endpointMetadata)) + } - fun updateEndpointMetadata(endpointMetadata: Metadata) { - sendEvent(UpdateEndpointMetadata(endpointMetadata)) - } + fun updateEndpointMetadata(endpointMetadata: Metadata) { + sendEvent(UpdateEndpointMetadata(endpointMetadata)) + } - fun updateTrackMetadata(trackId: String, trackMetadata: Metadata) { - sendEvent(UpdateTrackMetadata(trackId, trackMetadata)) - } + fun updateTrackMetadata( + trackId: String, + trackMetadata: Metadata + ) { + sendEvent(UpdateTrackMetadata(trackId, trackMetadata)) + } - fun setTargetTrackEncoding(trackId: String, encoding: TrackEncoding) { - sendEvent( - SelectEncoding( - trackId, - encoding.rid + fun setTargetTrackEncoding( + trackId: String, + encoding: TrackEncoding + ) { + sendEvent( + SelectEncoding( + trackId, + encoding.rid + ) ) - ) - } + } - fun renegotiateTracks() { - sendEvent(RenegotiateTracks()) - } + fun renegotiateTracks() { + sendEvent(RenegotiateTracks()) + } - fun localCandidate(sdp: String, sdpMLineIndex: Int) { - sendEvent( - LocalCandidate( - sdp, - sdpMLineIndex + fun localCandidate( + sdp: String, + sdpMLineIndex: Int + ) { + sendEvent( + LocalCandidate( + sdp, + sdpMLineIndex + ) ) - ) - } + } - fun sdpOffer( - sdp: String, - trackIdToTrackMetadata: Map, - midToTrackId: Map - ) { - sendEvent( - SdpOffer( - sdp, - trackIdToTrackMetadata, - midToTrackId + fun sdpOffer( + sdp: String, + trackIdToTrackMetadata: Map, + midToTrackId: Map + ) { + sendEvent( + SdpOffer( + sdp, + trackIdToTrackMetadata, + midToTrackId + ) ) - ) - } + } - fun disconnect() { - sendEvent(Disconnect()) - } + fun disconnect() { + sendEvent(Disconnect()) + } - private fun sendEvent(event: SendableEvent) { - val serializedMediaEvent = gson.toJson(event.serializeToMap()) - engineListener.onSendMediaEvent(serializedMediaEvent) - } + private fun sendEvent(event: SendableEvent) { + val serializedMediaEvent = gson.toJson(event.serializeToMap()) + engineListener.onSendMediaEvent(serializedMediaEvent) + } - private fun decodeEvent(event: SerializedMediaEvent): ReceivableEvent? { - val type = object : TypeToken>() {}.type + private fun decodeEvent(event: SerializedMediaEvent): ReceivableEvent? 
{ + val type = object : TypeToken>() {}.type - val rawMessage: Map = gson.fromJson(event, type) + val rawMessage: Map = gson.fromJson(event, type) - ReceivableEvent.decode(rawMessage)?.let { - return it - } ?: run { - Timber.d("Failed to decode event $rawMessage") - return null + ReceivableEvent.decode(rawMessage)?.let { + return it + } ?: run { + Timber.d("Failed to decode event $rawMessage") + return null + } } - } - fun onEvent(serializedEvent: SerializedMediaEvent) { - when (val event = decodeEvent(serializedEvent)) { - is Connected -> engineListener.onConnected(event.data.id, event.data.otherEndpoints) - is OfferData -> engineListener.onOfferData(event.data.integratedTurnServers, event.data.tracksTypes) - is EndpointRemoved -> engineListener.onEndpointRemoved(event.data.id) - is EndpointAdded -> engineListener.onEndpointAdded( - Endpoint(event.data.id, event.data.type, event.data.metadata, mapOf()) - ) - is EndpointUpdated -> engineListener.onEndpointUpdated(event.data.id, event.data.metadata) - is RemoteCandidate -> engineListener.onRemoteCandidate( - event.data.candidate, - event.data.sdpMLineIndex, - event.data.sdpMid - ) - is SdpAnswer -> engineListener.onSdpAnswer(event.data.type, event.data.sdp, event.data.midToTrackId) - is TrackUpdated -> engineListener.onTrackUpdated( - event.data.endpointId, - event.data.trackId, - event.data.metadata - ) - is TracksAdded -> engineListener.onTracksAdded(event.data.endpointId, event.data.trackIdToMetadata) - is TracksRemoved -> engineListener.onTracksRemoved(event.data.endpointId, event.data.trackIds) - is EncodingSwitched -> engineListener.onTrackEncodingChanged( - event.data.endpointId, - event.data.trackId, - event.data.encoding, - event.data.reason - ) - is VadNotification -> engineListener.onVadNotification(event.data.trackId, event.data.status) - is BandwidthEstimation -> engineListener.onBandwidthEstimation(event.data.estimation.roundToLong()) - else -> Timber.e("Failed to process unknown event: $event") + fun onEvent(serializedEvent: SerializedMediaEvent) { + when (val event = decodeEvent(serializedEvent)) { + is Connected -> engineListener.onConnected(event.data.id, event.data.otherEndpoints) + is OfferData -> engineListener.onOfferData(event.data.integratedTurnServers, event.data.tracksTypes) + is EndpointRemoved -> engineListener.onEndpointRemoved(event.data.id) + is EndpointAdded -> + engineListener.onEndpointAdded( + Endpoint(event.data.id, event.data.type, event.data.metadata, mapOf()) + ) + is EndpointUpdated -> engineListener.onEndpointUpdated(event.data.id, event.data.metadata) + is RemoteCandidate -> + engineListener.onRemoteCandidate( + event.data.candidate, + event.data.sdpMLineIndex, + event.data.sdpMid + ) + is SdpAnswer -> engineListener.onSdpAnswer(event.data.type, event.data.sdp, event.data.midToTrackId) + is TrackUpdated -> + engineListener.onTrackUpdated( + event.data.endpointId, + event.data.trackId, + event.data.metadata + ) + is TracksAdded -> engineListener.onTracksAdded(event.data.endpointId, event.data.trackIdToMetadata) + is TracksRemoved -> engineListener.onTracksRemoved(event.data.endpointId, event.data.trackIds) + is EncodingSwitched -> + engineListener.onTrackEncodingChanged( + event.data.endpointId, + event.data.trackId, + event.data.encoding, + event.data.reason + ) + is VadNotification -> engineListener.onVadNotification(event.data.trackId, event.data.status) + is BandwidthEstimation -> engineListener.onBandwidthEstimation(event.data.estimation.roundToLong()) + else -> Timber.e("Failed to process 
unknown event: $event") + } } } -} diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineListener.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineListener.kt index b51f9ab..aa01293 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineListener.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/RTCEngineListener.kt @@ -6,18 +6,66 @@ import org.membraneframework.rtc.utils.Metadata import org.membraneframework.rtc.utils.SerializedMediaEvent internal interface RTCEngineListener { - fun onConnected(endpointID: String, otherEndpoints: List) + fun onConnected( + endpointID: String, + otherEndpoints: List + ) + fun onSendMediaEvent(event: SerializedMediaEvent) + fun onEndpointAdded(endpoint: Endpoint) + fun onEndpointRemoved(endpointId: String) - fun onEndpointUpdated(endpointId: String, endpointMetadata: Metadata) - fun onOfferData(integratedTurnServers: List, tracksTypes: Map) - fun onSdpAnswer(type: String, sdp: String, midToTrackId: Map) - fun onRemoteCandidate(candidate: String, sdpMLineIndex: Int, sdpMid: String?) - fun onTracksAdded(endpointId: String, trackIdToMetadata: Map) - fun onTracksRemoved(endpointId: String, trackIds: List) - fun onTrackUpdated(endpointId: String, trackId: String, metadata: Metadata) - fun onTrackEncodingChanged(endpointId: String, trackId: String, encoding: String, encodingReason: String) - fun onVadNotification(trackId: String, status: String) + + fun onEndpointUpdated( + endpointId: String, + endpointMetadata: Metadata + ) + + fun onOfferData( + integratedTurnServers: List, + tracksTypes: Map + ) + + fun onSdpAnswer( + type: String, + sdp: String, + midToTrackId: Map + ) + + fun onRemoteCandidate( + candidate: String, + sdpMLineIndex: Int, + sdpMid: String? + ) + + fun onTracksAdded( + endpointId: String, + trackIdToMetadata: Map + ) + + fun onTracksRemoved( + endpointId: String, + trackIds: List + ) + + fun onTrackUpdated( + endpointId: String, + trackId: String, + metadata: Metadata + ) + + fun onTrackEncodingChanged( + endpointId: String, + trackId: String, + encoding: String, + encodingReason: String + ) + + fun onVadNotification( + trackId: String, + status: String + ) + fun onBandwidthEstimation(estimation: Long) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/SimulcastConfig.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/SimulcastConfig.kt index 2a33dd0..6e5b046 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/SimulcastConfig.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/SimulcastConfig.kt @@ -7,7 +7,9 @@ package org.membraneframework.rtc * `"l"` - original encoding scaled down by 4 */ enum class TrackEncoding(val rid: String) { - L("l"), M("m"), H("h"); + L("l"), + M("m"), + H("h"); companion object { fun fromString(s: String): TrackEncoding? 
{ diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/MembraneRTCComponent.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/MembraneRTCComponent.kt index 3124bcf..fe6e3d0 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/MembraneRTCComponent.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/MembraneRTCComponent.kt @@ -23,6 +23,8 @@ internal interface MembraneRTCComponent { @Component.Factory interface Factory { - fun create(@BindsInstance appContext: Context): MembraneRTCComponent + fun create( + @BindsInstance appContext: Context + ): MembraneRTCComponent } } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/RTCModule.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/RTCModule.kt index dce0084..9ce13bb 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/RTCModule.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/dagger/RTCModule.kt @@ -20,61 +20,65 @@ internal object RTCModule { @Singleton @Provides fun audioDeviceModule(appContext: Context): AudioDeviceModule { - val audioRecordErrorCallback = object : JavaAudioDeviceModule.AudioRecordErrorCallback { - override fun onWebRtcAudioRecordInitError(errorMessage: String?) { - Timber.e("onWebRtcAudioRecordInitError: $errorMessage") - } + val audioRecordErrorCallback = + object : JavaAudioDeviceModule.AudioRecordErrorCallback { + override fun onWebRtcAudioRecordInitError(errorMessage: String?) { + Timber.e("onWebRtcAudioRecordInitError: $errorMessage") + } - override fun onWebRtcAudioRecordStartError( - errorCode: JavaAudioDeviceModule.AudioRecordStartErrorCode?, - errorMessage: String? - ) { - Timber.e("onWebRtcAudioRecordStartError: $errorCode. $errorMessage") - } + override fun onWebRtcAudioRecordStartError( + errorCode: JavaAudioDeviceModule.AudioRecordStartErrorCode?, + errorMessage: String? + ) { + Timber.e("onWebRtcAudioRecordStartError: $errorCode. $errorMessage") + } - override fun onWebRtcAudioRecordError(errorMessage: String?) { - Timber.e("onWebRtcAudioRecordError: $errorMessage") + override fun onWebRtcAudioRecordError(errorMessage: String?) { + Timber.e("onWebRtcAudioRecordError: $errorMessage") + } } - } - val audioTrackErrorCallback = object : JavaAudioDeviceModule.AudioTrackErrorCallback { - override fun onWebRtcAudioTrackInitError(errorMessage: String?) { - Timber.e("onWebRtcAudioTrackInitError: $errorMessage") - } + val audioTrackErrorCallback = + object : JavaAudioDeviceModule.AudioTrackErrorCallback { + override fun onWebRtcAudioTrackInitError(errorMessage: String?) { + Timber.e("onWebRtcAudioTrackInitError: $errorMessage") + } - override fun onWebRtcAudioTrackStartError( - errorCode: JavaAudioDeviceModule.AudioTrackStartErrorCode?, - errorMessage: String? - ) { - Timber.e("onWebRtcAudioTrackStartError: $errorCode. $errorMessage") - } + override fun onWebRtcAudioTrackStartError( + errorCode: JavaAudioDeviceModule.AudioTrackStartErrorCode?, + errorMessage: String? + ) { + Timber.e("onWebRtcAudioTrackStartError: $errorCode. $errorMessage") + } - override fun onWebRtcAudioTrackError(errorMessage: String?) { - Timber.e("onWebRtcAudioTrackError: $errorMessage") - } - } - val audioRecordStateCallback: JavaAudioDeviceModule.AudioRecordStateCallback = object : - JavaAudioDeviceModule.AudioRecordStateCallback { - override fun onWebRtcAudioRecordStart() { - Timber.i("Audio recording starts") + override fun onWebRtcAudioTrackError(errorMessage: String?) 
{ + Timber.e("onWebRtcAudioTrackError: $errorMessage") + } } + val audioRecordStateCallback: JavaAudioDeviceModule.AudioRecordStateCallback = + object : + JavaAudioDeviceModule.AudioRecordStateCallback { + override fun onWebRtcAudioRecordStart() { + Timber.i("Audio recording starts") + } - override fun onWebRtcAudioRecordStop() { - Timber.i("Audio recording stops") + override fun onWebRtcAudioRecordStop() { + Timber.i("Audio recording stops") + } } - } // Set audio track state callbacks. - val audioTrackStateCallback: JavaAudioDeviceModule.AudioTrackStateCallback = object : - JavaAudioDeviceModule.AudioTrackStateCallback { - override fun onWebRtcAudioTrackStart() { - Timber.i("Audio playout starts") - } + val audioTrackStateCallback: JavaAudioDeviceModule.AudioTrackStateCallback = + object : + JavaAudioDeviceModule.AudioTrackStateCallback { + override fun onWebRtcAudioTrackStart() { + Timber.i("Audio playout starts") + } - override fun onWebRtcAudioTrackStop() { - Timber.i("Audio playout stops") + override fun onWebRtcAudioTrackStop() { + Timber.i("Audio playout stops") + } } - } return JavaAudioDeviceModule.builder(appContext) .setUseHardwareAcousticEchoCanceler(true) diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/events/Event.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/events/Event.kt index 7d1baa3..cc8a974 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/events/Event.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/events/Event.kt @@ -41,15 +41,16 @@ data class SdpOffer(val type: String, val data: Payload) : SendableEvent() { "custom", mapOf( "type" to "sdpOffer", - "data" to mapOf( - "sdpOffer" to mapOf( - "type" to "offer", - "sdp" to sdp - ), - "trackIdToTrackMetadata" to trackIdToTrackMetadata, - "midToTrackId" to midToTrackId - ) - + "data" to + mapOf( + "sdpOffer" to + mapOf( + "type" to "offer", + "sdp" to sdp + ), + "trackIdToTrackMetadata" to trackIdToTrackMetadata, + "midToTrackId" to midToTrackId + ) ) ) } @@ -60,10 +61,11 @@ data class LocalCandidate(val type: String, val data: Payload) : SendableEvent() "custom", mapOf( "type" to "candidate", - "data" to mapOf( - "candidate" to candidate, - "sdpMLineIndex" to sdpMLineIndex - ) + "data" to + mapOf( + "candidate" to candidate, + "sdpMLineIndex" to sdpMLineIndex + ) ) ) } @@ -84,10 +86,11 @@ data class SelectEncoding(val type: String, val data: Payload) : SendableEvent() "custom", mapOf( "type" to "setTargetTrackVariant", - "data" to mapOf( - "trackId" to trackId, - "variant" to encoding - ) + "data" to + mapOf( + "trackId" to trackId, + "variant" to encoding + ) ) ) } @@ -155,7 +158,6 @@ enum class ReceivableEventType { internal data class BaseReceivableEvent(val type: ReceivableEventType) sealed class ReceivableEvent { - companion object { fun decode(payload: Payload): ReceivableEvent? 
{ try { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalAudioTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalAudioTrack.kt index 9a5eae8..4ed197b 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalAudioTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalAudioTrack.kt @@ -16,7 +16,6 @@ import java.util.* class LocalAudioTrack constructor( var mediaTrack: org.webrtc.AudioTrack ) : AudioTrack(mediaTrack), LocalTrack { - override fun start() { } @@ -32,20 +31,24 @@ class LocalAudioTrack constructor( } companion object { - fun create(context: Context, factory: PeerConnectionFactory): LocalAudioTrack { + fun create( + context: Context, + factory: PeerConnectionFactory + ): LocalAudioTrack { if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ) { throw SecurityException("Missing permissions to start recording the audio") } - val items = listOf( - MediaConstraints.KeyValuePair("googEchoCancellation", "true"), - MediaConstraints.KeyValuePair("googAutoGainControl", "true"), - MediaConstraints.KeyValuePair("googHighpassFilter", "true"), - MediaConstraints.KeyValuePair("googNoiseSuppression", "true"), - MediaConstraints.KeyValuePair("googTypingNoiseDetection", "true") - ) + val items = + listOf( + MediaConstraints.KeyValuePair("googEchoCancellation", "true"), + MediaConstraints.KeyValuePair("googAutoGainControl", "true"), + MediaConstraints.KeyValuePair("googHighpassFilter", "true"), + MediaConstraints.KeyValuePair("googNoiseSuppression", "true"), + MediaConstraints.KeyValuePair("googTypingNoiseDetection", "true") + ) val audioConstraints = MediaConstraints() audioConstraints.optional.addAll(items) diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalScreencastTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalScreencastTrack.kt index 4fe13e6..f77ba4e 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalScreencastTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalScreencastTrack.kt @@ -22,130 +22,134 @@ import java.util.* * VideoTrack. */ class LocalScreencastTrack -constructor( - private val source: VideoSource, - mediaTrack: org.webrtc.VideoTrack, - context: Context, - eglBase: EglBase, - private val capturer: ScreenCapturerAndroid, - val videoParameters: VideoParameters, - callback: ProjectionCallback -) : VideoTrack(mediaTrack, eglBase.eglBaseContext), LocalTrack { - private val screencastConnection = ScreencastServiceConnector(context) - private val mutex = Mutex() - private val coroutineScope: CoroutineScope = - ClosableCoroutineScope(SupervisorJob()) - private var isStopped = false - - suspend fun startForegroundService(notificationId: Int?, notification: Notification?) 
{ - mutex.withLock { - if (!isStopped) { - screencastConnection.connect() - screencastConnection.start(notificationId, notification) + constructor( + private val source: VideoSource, + mediaTrack: org.webrtc.VideoTrack, + context: Context, + eglBase: EglBase, + private val capturer: ScreenCapturerAndroid, + val videoParameters: VideoParameters, + callback: ProjectionCallback + ) : VideoTrack(mediaTrack, eglBase.eglBaseContext), LocalTrack { + private val screencastConnection = ScreencastServiceConnector(context) + private val mutex = Mutex() + private val coroutineScope: CoroutineScope = + ClosableCoroutineScope(SupervisorJob()) + private var isStopped = false + + suspend fun startForegroundService( + notificationId: Int?, + notification: Notification? + ) { + mutex.withLock { + if (!isStopped) { + screencastConnection.connect() + screencastConnection.start(notificationId, notification) + } } } - } - override fun start() { - coroutineScope.launch { - mutex.withLock { - if (!isStopped) { - capturer.startCapture( - videoParameters.dimensions.width, - videoParameters.dimensions.height, - videoParameters.maxFps - ) + override fun start() { + coroutineScope.launch { + mutex.withLock { + if (!isStopped) { + capturer.startCapture( + videoParameters.dimensions.width, + videoParameters.dimensions.height, + videoParameters.maxFps + ) + } } } } - } - override fun stop() { - coroutineScope.launch { - mutex.withLock { - isStopped = true - screencastConnection.stop() - capturer.stopCapture() - capturer.dispose() - videoTrack.dispose() - source.dispose() + override fun stop() { + coroutineScope.launch { + mutex.withLock { + isStopped = true + screencastConnection.stop() + capturer.stopCapture() + capturer.dispose() + videoTrack.dispose() + source.dispose() + } } } - } - override fun enabled(): Boolean { - return videoTrack.enabled() - } + override fun enabled(): Boolean { + return videoTrack.enabled() + } - override fun setEnabled(enabled: Boolean) { - videoTrack.setEnabled(enabled) - } + override fun setEnabled(enabled: Boolean) { + videoTrack.setEnabled(enabled) + } /* MediaProjection callback wrapper holding several callbacks that will be invoked once the media projections stops. */ - class ProjectionCallback : MediaProjection.Callback() { - var callbacks: ArrayList<() -> Unit> = arrayListOf() + class ProjectionCallback : MediaProjection.Callback() { + var callbacks: ArrayList<() -> Unit> = arrayListOf() - override fun onStop() { - callbacks.forEach { - it.invoke() + override fun onStop() { + callbacks.forEach { + it.invoke() + } + + callbacks.clear() } - callbacks.clear() + fun addCallback(callback: () -> Unit) { + callbacks.add(callback) + } } - fun addCallback(callback: () -> Unit) { - callbacks.add(callback) - } - } + companion object { + /** + * Creates a screencast track. + * + * @param context: context of the current application + * @param factory: an instance of PeerConnectionFactory used for creating video sources and tracks + * @param simulcastConfig: simulcast configuration. By default simulcast is disabled. 
+ * @param eglBase: an instance of EglBase used for rendering the video + */ + fun create( + context: Context, + factory: PeerConnectionFactory, + eglBase: EglBase, + mediaProjectionPermission: Intent, + videoParameters: VideoParameters, + onStopped: (track: LocalScreencastTrack) -> Unit + ): LocalScreencastTrack { + val source = factory.createVideoSource(true) + + val track = factory.createVideoTrack(UUID.randomUUID().toString(), source) + + val callback = ProjectionCallback() + + val capturer = ScreenCapturerAndroid(mediaProjectionPermission, callback) + + capturer.initialize( + SurfaceTextureHelper.create("ScreenVideoCaptureThread", eglBase.eglBaseContext), + context, + source.capturerObserver + ) + + val localScreencastTrack = + LocalScreencastTrack( + source, + track, + context, + eglBase, + capturer, + videoParameters, + callback + ) + callback.addCallback { + onStopped(localScreencastTrack) + } - companion object { - /** - * Creates a screencast track. - * - * @param context: context of the current application - * @param factory: an instance of PeerConnectionFactory used for creating video sources and tracks - * @param simulcastConfig: simulcast configuration. By default simulcast is disabled. - * @param eglBase: an instance of EglBase used for rendering the video - */ - fun create( - context: Context, - factory: PeerConnectionFactory, - eglBase: EglBase, - mediaProjectionPermission: Intent, - videoParameters: VideoParameters, - onStopped: (track: LocalScreencastTrack) -> Unit - ): LocalScreencastTrack { - val source = factory.createVideoSource(true) - - val track = factory.createVideoTrack(UUID.randomUUID().toString(), source) - - val callback = ProjectionCallback() - - val capturer = ScreenCapturerAndroid(mediaProjectionPermission, callback) - - capturer.initialize( - SurfaceTextureHelper.create("ScreenVideoCaptureThread", eglBase.eglBaseContext), - context, - source.capturerObserver - ) - - val localScreencastTrack = LocalScreencastTrack( - source, - track, - context, - eglBase, - capturer, - videoParameters, - callback - ) - callback.addCallback { - onStopped(localScreencastTrack) + return localScreencastTrack } - - return localScreencastTrack } } -} diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalTrack.kt index 1d21bbd..f00c57c 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalTrack.kt @@ -2,7 +2,10 @@ package org.membraneframework.rtc.media interface LocalTrack : MediaTrackProvider { fun start() + fun stop() + fun enabled(): Boolean + fun setEnabled(enabled: Boolean) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalVideoTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalVideoTrack.kt index b1f6a59..2a543e0 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalVideoTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/LocalVideoTrack.kt @@ -11,81 +11,84 @@ import java.util.* * Internally it wraps a WebRTC VideoTrack. 
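
Note for readers skimming this file: the LocalScreencastTrack hunks above are pure re-indentation, but the factory and lifecycle API they touch is easy to misread in diff form. The following is a rough usage sketch based only on the signatures visible in these hunks; the MediaProjection permission Intent, PeerConnectionFactory and EglBase come from the host app and are not part of this changeset.

import android.content.Context
import android.content.Intent
import org.membraneframework.rtc.media.LocalScreencastTrack
import org.membraneframework.rtc.media.VideoParameters
import org.webrtc.EglBase
import org.webrtc.PeerConnectionFactory

// Illustrative sketch, not code from this repository.
// `projectionIntent` is assumed to be the Intent returned by the host app's
// MediaProjection permission flow; `factory` and `eglBase` are assumed to be
// created elsewhere.
suspend fun startScreencast(
    context: Context,
    factory: PeerConnectionFactory,
    eglBase: EglBase,
    projectionIntent: Intent
): LocalScreencastTrack {
    val track =
        LocalScreencastTrack.create(
            context,
            factory,
            eglBase,
            projectionIntent,
            VideoParameters.presetScreenShareHD15
        ) { stoppedTrack ->
            // Invoked via ProjectionCallback when the user ends the projection.
            stoppedTrack.stop()
        }
    // Passing nulls lets ScreencastService fall back to its default notification.
    track.startForegroundService(notificationId = null, notification = null)
    track.start()
    return track
}
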
*/ class LocalVideoTrack -constructor( - mediaTrack: org.webrtc.VideoTrack, - private val capturer: Capturer, - eglBase: EglBase, - val videoParameters: VideoParameters -) : VideoTrack(mediaTrack, eglBase.eglBaseContext), LocalTrack { - - data class CaptureDevice(val deviceName: String, val isFrontFacing: Boolean, val isBackFacing: Boolean) - - companion object { - fun create( - context: Context, - factory: PeerConnectionFactory, - eglBase: EglBase, - videoParameters: VideoParameters, - cameraName: String? = null - ): LocalVideoTrack { - val source = factory.createVideoSource(false) - val track = factory.createVideoTrack(UUID.randomUUID().toString(), source) - - val capturer = CameraCapturer( - context = context, - source = source, - rootEglBase = eglBase, - videoParameters = videoParameters, - cameraName - ) - - return LocalVideoTrack(track, capturer, eglBase, videoParameters) - } - - fun getCaptureDevices(context: Context): List { - val enumerator = if (Camera2Enumerator.isSupported(context)) { - Camera2Enumerator(context) - } else { - Camera1Enumerator(true) + constructor( + mediaTrack: org.webrtc.VideoTrack, + private val capturer: Capturer, + eglBase: EglBase, + val videoParameters: VideoParameters + ) : VideoTrack(mediaTrack, eglBase.eglBaseContext), LocalTrack { + data class CaptureDevice(val deviceName: String, val isFrontFacing: Boolean, val isBackFacing: Boolean) + + companion object { + fun create( + context: Context, + factory: PeerConnectionFactory, + eglBase: EglBase, + videoParameters: VideoParameters, + cameraName: String? = null + ): LocalVideoTrack { + val source = factory.createVideoSource(false) + val track = factory.createVideoTrack(UUID.randomUUID().toString(), source) + + val capturer = + CameraCapturer( + context = context, + source = source, + rootEglBase = eglBase, + videoParameters = videoParameters, + cameraName + ) + + return LocalVideoTrack(track, capturer, eglBase, videoParameters) } - return enumerator.deviceNames.map { name -> - CaptureDevice( - name, - enumerator.isFrontFacing(name), - enumerator.isBackFacing(name) - ) + + fun getCaptureDevices(context: Context): List { + val enumerator = + if (Camera2Enumerator.isSupported(context)) { + Camera2Enumerator(context) + } else { + Camera1Enumerator(true) + } + return enumerator.deviceNames.map { name -> + CaptureDevice( + name, + enumerator.isFrontFacing(name), + enumerator.isBackFacing(name) + ) + } } } - } - override fun start() { - capturer.startCapture() - } + override fun start() { + capturer.startCapture() + } - override fun stop() { - capturer.stopCapture() - } + override fun stop() { + capturer.stopCapture() + } - override fun enabled(): Boolean { - return videoTrack.enabled() - } + override fun enabled(): Boolean { + return videoTrack.enabled() + } - override fun setEnabled(enabled: Boolean) { - videoTrack.setEnabled(enabled) - } + override fun setEnabled(enabled: Boolean) { + videoTrack.setEnabled(enabled) + } - fun flipCamera() { - (capturer as? CameraCapturer)?.flipCamera() - } + fun flipCamera() { + (capturer as? CameraCapturer)?.flipCamera() + } - fun switchCamera(deviceName: String) { - (capturer as? CameraCapturer)?.switchCamera(deviceName) + fun switchCamera(deviceName: String) { + (capturer as? CameraCapturer)?.switchCamera(deviceName) + } } -} interface Capturer { fun capturer(): VideoCapturer + fun startCapture() + fun stopCapture() } @@ -128,11 +131,12 @@ class CameraCapturer constructor( } private fun createCapturer(providedDeviceName: String?) 
{ - val enumerator = if (Camera2Enumerator.isSupported(context)) { - Camera2Enumerator(context) - } else { - Camera1Enumerator(true) - } + val enumerator = + if (Camera2Enumerator.isSupported(context)) { + Camera2Enumerator(context) + } else { + Camera1Enumerator(true) + } var deviceName = providedDeviceName @@ -153,15 +157,17 @@ class CameraCapturer constructor( source.capturerObserver ) - val sizes = enumerator.getSupportedFormats(deviceName) - ?.map { Size(it.width, it.height) } - ?: emptyList() + val sizes = + enumerator.getSupportedFormats(deviceName) + ?.map { Size(it.width, it.height) } + ?: emptyList() - this.size = CameraEnumerationAndroid.getClosestSupportedSize( - sizes, - videoParameters.dimensions.width, - videoParameters.dimensions.height - ) + this.size = + CameraEnumerationAndroid.getClosestSupportedSize( + sizes, + videoParameters.dimensions.width, + videoParameters.dimensions.height + ) } override fun onCameraSwitchDone(isFrontCamera: Boolean) { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/MediaTrackProvider.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/MediaTrackProvider.kt index 7a74a3f..62ca4a8 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/MediaTrackProvider.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/MediaTrackProvider.kt @@ -4,5 +4,6 @@ import org.webrtc.MediaStreamTrack interface MediaTrackProvider { fun id(): String + fun rtcTrack(): MediaStreamTrack } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/OnSoundDetectedListener.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/OnSoundDetectedListener.kt index a2997e2..4094dd6 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/OnSoundDetectedListener.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/OnSoundDetectedListener.kt @@ -2,5 +2,6 @@ package org.membraneframework.rtc.media interface OnSoundDetectedListener { fun onSoundDetected(isDetected: Boolean) + fun onSoundVolumeChanged(volume: Int) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteAudioTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteAudioTrack.kt index 8e98131..eaf9610 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteAudioTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteAudioTrack.kt @@ -2,7 +2,6 @@ package org.membraneframework.rtc.media class RemoteAudioTrack constructor( mediaTrack: org.webrtc.AudioTrack - ) : AudioTrack(mediaTrack), RemoteTrack { override fun enabled(): Boolean { return this.audioTrack.enabled() diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteTrack.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteTrack.kt index 75a37f1..175f44d 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteTrack.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/RemoteTrack.kt @@ -2,5 +2,6 @@ package org.membraneframework.rtc.media interface RemoteTrack { fun enabled(): Boolean + fun setEnabled(enabled: Boolean) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SimulcastVideoEncoderFactoryWrapper.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SimulcastVideoEncoderFactoryWrapper.kt index 8c2cf1b..cb4b29c 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SimulcastVideoEncoderFactoryWrapper.kt +++ 
b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SimulcastVideoEncoderFactoryWrapper.kt @@ -25,7 +25,6 @@ internal open class SimulcastVideoEncoderFactoryWrapper( sharedContext: EglBase.Context?, encoderOptions: EncoderOptions ) : VideoEncoderFactory { - /** * Factory that prioritizes software encoder. * @@ -51,7 +50,6 @@ internal open class SimulcastVideoEncoderFactoryWrapper( */ private class FallbackFactory(private val hardwareVideoEncoderFactory: VideoEncoderFactory) : VideoEncoderFactory { - private val softwareVideoEncoderFactory: VideoEncoderFactory = SoftwareVideoEncoderFactory() override fun createEncoder(info: VideoCodecInfo): VideoEncoder? { @@ -80,18 +78,21 @@ internal open class SimulcastVideoEncoderFactoryWrapper( * - Always calls the encoder on the thread. */ private class StreamEncoderWrapper(private val encoder: VideoEncoder) : VideoEncoder { - val executor: ExecutorService = Executors.newSingleThreadExecutor() var streamSettings: VideoEncoder.Settings? = null - override fun initEncode(settings: VideoEncoder.Settings, callback: VideoEncoder.Callback?): VideoCodecStatus { + override fun initEncode( + settings: VideoEncoder.Settings, + callback: VideoEncoder.Callback? + ): VideoCodecStatus { streamSettings = settings - val future = executor.submit( - Callable { - return@Callable encoder.initEncode(settings, callback) - } - ) + val future = + executor.submit( + Callable { + return@Callable encoder.initEncode(settings, callback) + } + ) return future.get() } @@ -100,45 +101,54 @@ internal open class SimulcastVideoEncoderFactoryWrapper( return future.get() } - override fun encode(frame: VideoFrame, encodeInfo: VideoEncoder.EncodeInfo?): VideoCodecStatus { - val future = executor.submit( - Callable { - if (streamSettings == null) { - return@Callable encoder.encode(frame, encodeInfo) - } else if (frame.buffer.width == streamSettings!!.width) { - return@Callable encoder.encode(frame, encodeInfo) - } else { - // The incoming buffer is different than the streamSettings received in initEncode() - // Need to scale. - val originalBuffer = frame.buffer - // TODO: Do we need to handle when the scale factor is weird? - val adaptedBuffer = originalBuffer.cropAndScale( - 0, - 0, - originalBuffer.width, - originalBuffer.height, - streamSettings!!.width, - streamSettings!!.height - ) - val adaptedFrame = VideoFrame(adaptedBuffer, frame.rotation, frame.timestampNs) - val result = encoder.encode(adaptedFrame, encodeInfo) - adaptedBuffer.release() - return@Callable result + override fun encode( + frame: VideoFrame, + encodeInfo: VideoEncoder.EncodeInfo? + ): VideoCodecStatus { + val future = + executor.submit( + Callable { + if (streamSettings == null) { + return@Callable encoder.encode(frame, encodeInfo) + } else if (frame.buffer.width == streamSettings!!.width) { + return@Callable encoder.encode(frame, encodeInfo) + } else { + // The incoming buffer is different than the streamSettings received in initEncode() + // Need to scale. + val originalBuffer = frame.buffer + // TODO: Do we need to handle when the scale factor is weird? 
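
The comment above marks the one non-trivial piece of logic in this file: each simulcast layer's encoder is initialized with its own, downscaled resolution, so frames arriving at the source size are cropped and scaled (the cropAndScale call just below) before being handed to that layer's encoder. The rid-to-scale relationship used across this changeset, documented on TrackEncoding and asserted in EndpointConnectionManagerTest further down, is roughly the following; the helper itself is illustrative and not part of the library.

// Illustrative helper only: maps the simulcast rids used in this repo
// ("h"/"m"/"l") to the scaleResolutionDownBy factors the tests assert
// (1.0 / 2.0 / 4.0) and returns the resulting layer dimensions.
fun expectedLayerDimensions(rid: String, sourceWidth: Int, sourceHeight: Int): Pair<Int, Int> {
    val divisor =
        when (rid) {
            "h" -> 1 // original resolution
            "m" -> 2 // scaled down by 2
            "l" -> 4 // scaled down by 4
            else -> error("unknown rid: $rid")
        }
    return Pair(sourceWidth / divisor, sourceHeight / divisor)
}
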
+ val adaptedBuffer = + originalBuffer.cropAndScale( + 0, + 0, + originalBuffer.width, + originalBuffer.height, + streamSettings!!.width, + streamSettings!!.height + ) + val adaptedFrame = VideoFrame(adaptedBuffer, frame.rotation, frame.timestampNs) + val result = encoder.encode(adaptedFrame, encodeInfo) + adaptedBuffer.release() + return@Callable result + } } - } - ) + ) return future.get() } - override fun setRateAllocation(allocation: VideoEncoder.BitrateAllocation?, frameRate: Int): VideoCodecStatus { - val future = executor.submit( - Callable { - return@Callable encoder.setRateAllocation( - allocation, - frameRate - ) - } - ) + override fun setRateAllocation( + allocation: VideoEncoder.BitrateAllocation?, + frameRate: Int + ): VideoCodecStatus { + val future = + executor.submit( + Callable { + return@Callable encoder.setRateAllocation( + allocation, + frameRate + ) + } + ) return future.get() } @@ -198,19 +208,21 @@ internal open class SimulcastVideoEncoderFactoryWrapper( private val native: SimulcastVideoEncoderFactory init { - val hardwareVideoEncoderFactory = HardwareVideoEncoderFactory( - sharedContext, - encoderOptions.enableIntelVp8Encoder, - encoderOptions.enableH264HighProfile - ) + val hardwareVideoEncoderFactory = + HardwareVideoEncoderFactory( + sharedContext, + encoderOptions.enableIntelVp8Encoder, + encoderOptions.enableH264HighProfile + ) val softwareVideoEncoderFactory = SoftwareVideoEncoderFactory() primary = StreamEncoderWrapperFactory(hardwareVideoEncoderFactory) fallback = StreamEncoderWrapperFactory(FallbackFactory(primary)) - native = if (encoderOptions.encoderType == EncoderType.HARDWARE) { - SimulcastVideoEncoderFactory(StreamEncoderWrapperFactory(hardwareVideoEncoderFactory), fallback) - } else { - SimulcastVideoEncoderFactory(StreamEncoderWrapperFactory(softwareVideoEncoderFactory), fallback) - } + native = + if (encoderOptions.encoderType == EncoderType.HARDWARE) { + SimulcastVideoEncoderFactory(StreamEncoderWrapperFactory(hardwareVideoEncoderFactory), fallback) + } else { + SimulcastVideoEncoderFactory(StreamEncoderWrapperFactory(softwareVideoEncoderFactory), fallback) + } } override fun createEncoder(info: VideoCodecInfo?): VideoEncoder? { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SoundDetection.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SoundDetection.kt index ee4deee..3043e9b 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SoundDetection.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/SoundDetection.kt @@ -26,30 +26,36 @@ class SoundDetection { * @param samplingRate The audio sampling rate in Hz. * @param volumeThreshold The threshold value in decibels (dB) above which a sound is considered detected. */ - fun start(monitorInterval: Int = 50, samplingRate: Int = 22050, volumeThreshold: Int = -60) { + fun start( + monitorInterval: Int = 50, + samplingRate: Int = 22050, + volumeThreshold: Int = -60 + ) { if (isRecording) { Timber.w("Sound detection is already in progress. Ignoring the start request.") return } - bufferSize = AudioRecord.getMinBufferSize( - samplingRate, - AudioFormat.CHANNEL_IN_MONO, - AudioFormat.ENCODING_PCM_16BIT - ) - audioRecord = try { - AudioRecord( - MediaRecorder.AudioSource.MIC, + bufferSize = + AudioRecord.getMinBufferSize( samplingRate, AudioFormat.CHANNEL_IN_MONO, - AudioFormat.ENCODING_PCM_16BIT, - bufferSize - ) - } catch (e: SecurityException) { - throw SecurityException( - "Unable to initialize the AudioRecord." 
+ - " Ensure that the recording permission is granted." + AudioFormat.ENCODING_PCM_16BIT ) - } + audioRecord = + try { + AudioRecord( + MediaRecorder.AudioSource.MIC, + samplingRate, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT, + bufferSize + ) + } catch (e: SecurityException) { + throw SecurityException( + "Unable to initialize the AudioRecord." + + " Ensure that the recording permission is granted." + ) + } if (audioRecord?.state == AudioRecord.STATE_INITIALIZED) { audioRecord?.startRecording() @@ -119,7 +125,10 @@ class SoundDetection { * @param volumeThreshold The threshold value in decibels (dB) above which a sound is considered detected. * @param volumeValue The current volume value in decibels (dB). */ - private fun detectSound(volumeThreshold: Int, volumeValue: Int) { + private fun detectSound( + volumeThreshold: Int, + volumeValue: Int + ) { setIsSoundDetected(volumeValue > volumeThreshold) setIsSoundVolumeChanged(volumeValue) } @@ -131,7 +140,10 @@ class SoundDetection { * @param bytesRead The number of bytes read from the audio buffer. * @return The maximum amplitude value from the buffer. */ - private fun getMaxAmplitude(buffer: ShortArray, bytesRead: Int): Int { + private fun getMaxAmplitude( + buffer: ShortArray, + bytesRead: Int + ): Int { return buffer.take(bytesRead).maxOfOrNull { abs(it.toInt()) } ?: 0 } @@ -169,7 +181,10 @@ class SoundDetection { * @param monitorInterval The time interval (in milliseconds) between sound detection checks. * @param volumeThreshold The threshold value in decibels (dB) above which a sound is considered detected. */ - private fun startTimer(monitorInterval: Int, volumeThreshold: Int) { + private fun startTimer( + monitorInterval: Int, + volumeThreshold: Int + ) { timer = Timer() timer?.scheduleAtFixedRate( object : TimerTask() { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/VideoParameters.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/VideoParameters.kt index a36911b..0b694d4 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/VideoParameters.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/VideoParameters.kt @@ -26,108 +26,126 @@ data class VideoParameters( ) { companion object { // 4:3 aspect ratio - val presetQVGA43 = VideoParameters( - dimensions = Dimensions(width = 240, height = 180), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(90), - maxFps = 10 - ) - val presetVGA43 = VideoParameters( - dimensions = Dimensions(width = 480, height = 360), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(225), - maxFps = 20 - ) - val presetQHD43 = VideoParameters( - dimensions = Dimensions(width = 720, height = 540), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(450), - maxFps = 25 - ) - val presetHD43 = VideoParameters( - dimensions = Dimensions(width = 960, height = 720), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_500), - maxFps = 30 - ) - val presetFHD43 = VideoParameters( - dimensions = Dimensions(width = 1440, height = 1080), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(2_800), - maxFps = 30 - ) + val presetQVGA43 = + VideoParameters( + dimensions = Dimensions(width = 240, height = 180), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(90), + maxFps = 10 + ) + val presetVGA43 = + VideoParameters( + dimensions = Dimensions(width = 480, height = 360), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(225), + maxFps = 20 + ) + val presetQHD43 = + VideoParameters( + dimensions = Dimensions(width = 720, height = 540), + maxBitrate 
= TrackBandwidthLimit.BandwidthLimit(450), + maxFps = 25 + ) + val presetHD43 = + VideoParameters( + dimensions = Dimensions(width = 960, height = 720), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_500), + maxFps = 30 + ) + val presetFHD43 = + VideoParameters( + dimensions = Dimensions(width = 1440, height = 1080), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(2_800), + maxFps = 30 + ) // 16:9 aspect ratio - val presetQVGA169 = VideoParameters( - dimensions = Dimensions(width = 320, height = 180), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(120), - maxFps = 10 - ) - val presetVGA169 = VideoParameters( - dimensions = Dimensions(width = 640, height = 360), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(300), - maxFps = 20 - ) - val presetQHD169 = VideoParameters( - dimensions = Dimensions(width = 960, height = 540), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(600), - maxFps = 25 - ) - val presetHD169 = VideoParameters( - dimensions = Dimensions(width = 1280, height = 720), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(2_000), - maxFps = 30 - ) - val presetFHD169 = VideoParameters( - dimensions = Dimensions(width = 1920, height = 1080), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(3_000), - maxFps = 30 - ) + val presetQVGA169 = + VideoParameters( + dimensions = Dimensions(width = 320, height = 180), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(120), + maxFps = 10 + ) + val presetVGA169 = + VideoParameters( + dimensions = Dimensions(width = 640, height = 360), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(300), + maxFps = 20 + ) + val presetQHD169 = + VideoParameters( + dimensions = Dimensions(width = 960, height = 540), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(600), + maxFps = 25 + ) + val presetHD169 = + VideoParameters( + dimensions = Dimensions(width = 1280, height = 720), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(2_000), + maxFps = 30 + ) + val presetFHD169 = + VideoParameters( + dimensions = Dimensions(width = 1920, height = 1080), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(3_000), + maxFps = 30 + ) // Screen share - val presetScreenShareVGA = VideoParameters( - dimensions = Dimensions(width = 640, height = 360), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(200), - maxFps = 3 - ) - val presetScreenShareHD5 = VideoParameters( - dimensions = Dimensions(width = 1280, height = 720), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(400), - maxFps = 5 - ) - val presetScreenShareHD15 = VideoParameters( - dimensions = Dimensions(width = 1280, height = 720), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_000), - maxFps = 15 - ) - val presetScreenShareFHD15 = VideoParameters( - dimensions = Dimensions(width = 1920, height = 1080), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_500), - maxFps = 15 - ) - val presetScreenShareFHD30 = VideoParameters( - dimensions = Dimensions(width = 1920, height = 1080), - maxBitrate = TrackBandwidthLimit.BandwidthLimit(3_000), - maxFps = 30 - ) + val presetScreenShareVGA = + VideoParameters( + dimensions = Dimensions(width = 640, height = 360), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(200), + maxFps = 3 + ) + val presetScreenShareHD5 = + VideoParameters( + dimensions = Dimensions(width = 1280, height = 720), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(400), + maxFps = 5 + ) + val presetScreenShareHD15 = + VideoParameters( + dimensions = Dimensions(width = 1280, height = 720), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_000), + maxFps = 15 + ) + val 
presetScreenShareFHD15 = + VideoParameters( + dimensions = Dimensions(width = 1920, height = 1080), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(1_500), + maxFps = 15 + ) + val presetScreenShareFHD30 = + VideoParameters( + dimensions = Dimensions(width = 1920, height = 1080), + maxBitrate = TrackBandwidthLimit.BandwidthLimit(3_000), + maxFps = 30 + ) - val presets43 = listOf( - presetQVGA43, - presetVGA43, - presetQHD43, - presetHD43, - presetFHD43 - ) + val presets43 = + listOf( + presetQVGA43, + presetVGA43, + presetQHD43, + presetHD43, + presetFHD43 + ) - val presets169 = listOf( - presetQVGA169, - presetVGA169, - presetQHD169, - presetHD169, - presetFHD169 - ) + val presets169 = + listOf( + presetQVGA169, + presetVGA169, + presetQHD169, + presetHD169, + presetFHD169 + ) - val presetsScreenShare = listOf( - presetScreenShareVGA, - presetScreenShareHD5, - presetScreenShareHD15, - presetScreenShareFHD15, - presetScreenShareFHD30 - ) + val presetsScreenShare = + listOf( + presetScreenShareVGA, + presetScreenShareHD5, + presetScreenShareHD15, + presetScreenShareFHD15, + presetScreenShareFHD30 + ) } } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastService.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastService.kt index e372650..8e31761 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastService.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastService.kt @@ -22,29 +22,34 @@ internal class ScreencastService : Service() { return binder } - fun start(notificationId: Int?, notification: Notification?) { - val properNotification = if (notification != null) { - notification - } else { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - createNotificationChannel() - } + fun start( + notificationId: Int?, + notification: Notification? 
+ ) { + val properNotification = + if (notification != null) { + notification + } else { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + createNotificationChannel() + } - NotificationCompat.Builder(this, DEFAULT_CHANNEL_ID) - .setPriority(NotificationCompat.PRIORITY_DEFAULT) - .build() - } + NotificationCompat.Builder(this, DEFAULT_CHANNEL_ID) + .setPriority(NotificationCompat.PRIORITY_DEFAULT) + .build() + } startForeground(notificationId ?: DEFAULT_NOTIFICATION_ID, properNotification) } @RequiresApi(Build.VERSION_CODES.O) private fun createNotificationChannel() { - val channel = NotificationChannel( - DEFAULT_CHANNEL_ID, - "Screen Capture", - NotificationManager.IMPORTANCE_LOW - ) + val channel = + NotificationChannel( + DEFAULT_CHANNEL_ID, + "Screen Capture", + NotificationManager.IMPORTANCE_LOW + ) val service = getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastServiceConnector.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastServiceConnector.kt index 86d446c..9f9cc86 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastServiceConnector.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/media/screencast/ScreencastServiceConnector.kt @@ -17,23 +17,27 @@ internal class ScreencastServiceConnector(private val context: Context) { private val awaitingConnects = mutableSetOf>() - private val connection = object : ServiceConnection { - override fun onServiceConnected(name: ComponentName?, binder: IBinder?) { - synchronized(this@ScreencastServiceConnector) { - connected = true - service = (binder as ScreencastService.ScreencastBinder).service + private val connection = + object : ServiceConnection { + override fun onServiceConnected( + name: ComponentName?, + binder: IBinder? + ) { + synchronized(this@ScreencastServiceConnector) { + connected = true + service = (binder as ScreencastService.ScreencastBinder).service - onConnected() + onConnected() + } } - } - override fun onServiceDisconnected(p0: ComponentName?) { - synchronized(this@ScreencastServiceConnector) { - connected = false - service = null + override fun onServiceDisconnected(p0: ComponentName?) { + synchronized(this@ScreencastServiceConnector) { + connected = false + service = null + } } } - } private fun onConnected() { awaitingConnects.forEach { @@ -43,7 +47,10 @@ internal class ScreencastServiceConnector(private val context: Context) { awaitingConnects.clear() } - fun start(notificationId: Int? = null, notification: Notification? = null) { + fun start( + notificationId: Int? = null, + notification: Notification? = null + ) { synchronized(this) { service?.start(notificationId, notification) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/models/TrackContext.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/models/TrackContext.kt index 0cad3bb..430c3c0 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/models/TrackContext.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/models/TrackContext.kt @@ -46,7 +46,10 @@ class TrackContext(track: RemoteTrack?, val endpoint: Endpoint, val trackId: Str var encodingReason: EncodingReason? 
= null private set - internal fun setEncoding(encoding: TrackEncoding, encodingReason: EncodingReason) { + internal fun setEncoding( + encoding: TrackEncoding, + encodingReason: EncodingReason + ) { this.encoding = encoding this.encodingReason = encodingReason onTrackEncodingChangeListener?.let { onTrackEncodingChangeListener?.onEncodingChanged(this) } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/transport/PhoenixTransport.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/transport/PhoenixTransport.kt index 75ae814..6b6e4b6 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/transport/PhoenixTransport.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/transport/PhoenixTransport.kt @@ -11,7 +11,9 @@ import timber.log.Timber sealed class PhoenixTransportError : Exception() { data class Unauthorized(val reason: String) : PhoenixTransportError() + data class ConnectionError(val reason: String) : PhoenixTransportError() + data class Unexpected(val reason: String) : PhoenixTransportError() override fun toString(): String { @@ -31,7 +33,9 @@ sealed class PhoenixTransportError : Exception() { */ interface PhoenixTransportListener { fun onEvent(event: SerializedMediaEvent) + fun onError(error: PhoenixTransportError) + fun onClose() } @@ -61,17 +65,20 @@ class PhoenixTransport constructor( var socketRefs: Array = emptyArray() suspendCancellableCoroutine { continuation -> - val openRef = socket!!.onOpen { - continuation.resumeWith(Result.success(Unit)) - } + val openRef = + socket!!.onOpen { + continuation.resumeWith(Result.success(Unit)) + } - val errorRef = socket!!.onError { error, _ -> - continuation.cancel(PhoenixTransportError.ConnectionError(error.toString())) - } + val errorRef = + socket!!.onError { error, _ -> + continuation.cancel(PhoenixTransportError.ConnectionError(error.toString())) + } - val closeRef = socket!!.onClose { - continuation.cancel(PhoenixTransportError.ConnectionError("closed")) - } + val closeRef = + socket!!.onClose { + continuation.cancel(PhoenixTransportError.ConnectionError("closed")) + } socketRefs += openRef socketRefs += errorRef @@ -123,9 +130,10 @@ class PhoenixTransport constructor( } fun send(event: SerializedMediaEvent) { - val payload = mapOf( - "data" to event - ) + val payload = + mapOf( + "data" to event + ) channel?.push("mediaEvent", payload) } } diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/ui/VideoTextureViewRenderer.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/ui/VideoTextureViewRenderer.kt index 99e1699..b4bc202 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/ui/VideoTextureViewRenderer.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/ui/VideoTextureViewRenderer.kt @@ -79,7 +79,7 @@ open class VideoTextureViewRenderer : this.rendererEvents = rendererEvents rotatedFrameWidth = 0 rotatedFrameHeight = 0 - eglRenderer.init(sharedContext, this /* rendererEvents */, configAttributes, drawer) + eglRenderer.init(sharedContext, this, configAttributes, drawer) } /** @@ -101,7 +101,11 @@ open class VideoTextureViewRenderer : * required. * @param drawer Custom drawer to use for this frame listener. */ - fun addFrameListener(listener: EglRenderer.FrameListener?, scale: Float, drawerParam: RendererCommon.GlDrawer?) { + fun addFrameListener( + listener: EglRenderer.FrameListener?, + scale: Float, + drawerParam: RendererCommon.GlDrawer? 
+ ) { eglRenderer.addFrameListener(listener, scale, drawerParam) } @@ -114,7 +118,10 @@ open class VideoTextureViewRenderer : * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is * required. */ - fun addFrameListener(listener: EglRenderer.FrameListener?, scale: Float) { + fun addFrameListener( + listener: EglRenderer.FrameListener?, + scale: Float + ) { eglRenderer.addFrameListener(listener, scale) } @@ -152,7 +159,10 @@ open class VideoTextureViewRenderer : requestLayout() } - fun setScalingType(scalingTypeMatchOrientation: ScalingType?, scalingTypeMismatchOrientation: ScalingType?) { + fun setScalingType( + scalingTypeMatchOrientation: ScalingType?, + scalingTypeMismatchOrientation: ScalingType? + ) { ThreadUtils.checkIsOnMainThread() videoLayoutMeasure.setScalingType( @@ -181,7 +191,10 @@ open class VideoTextureViewRenderer : } // View layout interface. - override fun onMeasure(widthSpec: Int, heightSpec: Int) { + override fun onMeasure( + widthSpec: Int, + heightSpec: Int + ) { ThreadUtils.checkIsOnMainThread() val size = @@ -191,16 +204,23 @@ open class VideoTextureViewRenderer : logD("onMeasure() New size: ${size.x}x${size.y}") } - override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) { + override fun onLayout( + changed: Boolean, + left: Int, + top: Int, + right: Int, + bottom: Int + ) { ThreadUtils.checkIsOnMainThread() - val aspectRatio = when (this.scalingType) { - ScalingType.SCALE_ASPECT_FIT -> - rotatedFrameWidth.toFloat() / max(rotatedFrameHeight, 1) + val aspectRatio = + when (this.scalingType) { + ScalingType.SCALE_ASPECT_FIT -> + rotatedFrameWidth.toFloat() / max(rotatedFrameHeight, 1) - else -> - (right - left) / (bottom - top).toFloat() - } + else -> + (right - left) / (bottom - top).toFloat() + } eglRenderer.setLayoutAspectRatio(aspectRatio) updateSurfaceSize() @@ -262,7 +282,10 @@ open class VideoTextureViewRenderer : /** * Sets the TextureView transform to preserve the aspect ratio of the video. 
*/ - private fun adjustAspectRatio(videoWidth: Float, videoHeight: Float) { + private fun adjustAspectRatio( + videoWidth: Float, + videoHeight: Float + ) { val viewWidth = width val viewHeight = height val aspectRatio = videoHeight / videoWidth @@ -304,10 +327,20 @@ open class VideoTextureViewRenderer : } override fun surfaceDestroyed(holder: SurfaceHolder) {} - override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {} + + override fun surfaceChanged( + holder: SurfaceHolder, + format: Int, + width: Int, + height: Int + ) {} // TextureView.SurfaceTextureListener implementation - override fun onSurfaceTextureAvailable(surface: SurfaceTexture, i: Int, i1: Int) { + override fun onSurfaceTextureAvailable( + surface: SurfaceTexture, + i: Int, + i1: Int + ) { ThreadUtils.checkIsOnMainThread() eglRenderer.createEglSurface(Surface(surfaceTexture)) surfaceHeight = 0 @@ -315,7 +348,11 @@ open class VideoTextureViewRenderer : updateSurfaceSize() } - override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) { + override fun onSurfaceTextureSizeChanged( + surface: SurfaceTexture, + width: Int, + height: Int + ) { ThreadUtils.checkIsOnMainThread() logD("surfaceChanged: size: $width x $height") @@ -353,7 +390,11 @@ open class VideoTextureViewRenderer : rendererEvents?.onFirstFrameRendered() } - override fun onFrameResolutionChanged(videoWidth: Int, videoHeight: Int, rotation: Int) { + override fun onFrameResolutionChanged( + videoWidth: Int, + videoHeight: Int, + rotation: Int + ) { logD("Resolution changed to $videoWidth x $videoHeight with rotation of $rotation") rendererEvents?.onFrameResolutionChanged(videoWidth, videoHeight, rotation) diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/SuspendableSdpObserver.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/SuspendableSdpObserver.kt index 99b1580..228668e 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/SuspendableSdpObserver.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/SuspendableSdpObserver.kt @@ -58,25 +58,27 @@ internal class SuspendableSdpObserver : SdpObserver { } } - suspend fun awaitCreate() = suspendCoroutine> { cont -> - if (returnedResults) throw IllegalStateException("observer already returned") - createCont = cont - - pendingCreate?.let { - cont.resume(it) - returnedResults = true + suspend fun awaitCreate() = + suspendCoroutine> { cont -> + if (returnedResults) throw IllegalStateException("observer already returned") + createCont = cont + + pendingCreate?.let { + cont.resume(it) + returnedResults = true + } } - } - suspend fun awaitSet() = suspendCoroutine> { cont -> - if (returnedResults) throw IllegalStateException("observer already returned") - setCont = cont + suspend fun awaitSet() = + suspendCoroutine> { cont -> + if (returnedResults) throw IllegalStateException("observer already returned") + setCont = cont - pendingSet?.let { - cont.resume(it) - returnedResults = true + pendingSet?.let { + cont.resume(it) + returnedResults = true + } } - } } internal suspend fun PeerConnection.createOffer(constraints: MediaConstraints): Result { diff --git a/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/TimberDebugTree.kt b/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/TimberDebugTree.kt index af93bfc..b4b345c 100644 --- a/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/TimberDebugTree.kt +++ b/MembraneRTC/src/main/java/org/membraneframework/rtc/utils/TimberDebugTree.kt 
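
The SuspendableSdpObserver hunks above only re-indent, but they show a pattern this module uses throughout: a callback-style WebRTC API is bridged into suspend functions by capturing the continuation and resuming it from the callback. A generic sketch of that shape follows; the names are illustrative and not from the repo.

import kotlin.coroutines.resume
import kotlin.coroutines.suspendCoroutine

// Generic form of the callback-to-coroutine bridge used by
// SuspendableSdpObserver.awaitCreate()/awaitSet(): register a callback,
// suspend, and resume the continuation when the callback fires.
// The real observer additionally guards against resuming twice
// (its returnedResults flag); this sketch omits that.
suspend fun <T> awaitCallback(register: (onResult: (T) -> Unit) -> Unit): T =
    suspendCoroutine { cont ->
        register { result -> cont.resume(result) }
    }
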
@@ -5,7 +5,12 @@ import timber.log.Timber class TimberDebugTree : Timber.DebugTree() { private val CALL_STACK_INDEX = 5 - override fun log(priority: Int, tag: String?, message: String, t: Throwable?) { + override fun log( + priority: Int, + tag: String?, + message: String, + t: Throwable? + ) { val stackTrace = Throwable().stackTrace if (stackTrace.size <= CALL_STACK_INDEX) { super.log(priority, tag, message, t) diff --git a/MembraneRTC/src/test/java/org/membraneframework/rtc/test/EndpointConnectionManagerTest.kt b/MembraneRTC/src/test/java/org/membraneframework/rtc/test/EndpointConnectionManagerTest.kt index 2e98549..31bd338 100644 --- a/MembraneRTC/src/test/java/org/membraneframework/rtc/test/EndpointConnectionManagerTest.kt +++ b/MembraneRTC/src/test/java/org/membraneframework/rtc/test/EndpointConnectionManagerTest.kt @@ -46,137 +46,150 @@ class EndpointConnectionManagerTest { @OptIn(ExperimentalCoroutinesApi::class) @Test - fun createsOffer() = runTest { - val offer = manager.getSdpOffer(emptyList(), emptyMap(), emptyList()) + fun createsOffer() = + runTest { + val offer = manager.getSdpOffer(emptyList(), emptyMap(), emptyList()) - assertNotNull(offer) - assertEquals("test_description", offer.description) - } + assertNotNull(offer) + assertEquals("test_description", offer.description) + } @OptIn(ExperimentalCoroutinesApi::class) @Test - fun addsAudioTrack() = runTest { - val audioTrack = LocalAudioTrack(mockk(relaxed = true)) - manager.getSdpOffer(emptyList(), emptyMap(), listOf(audioTrack)) - - verify(exactly = 1) { - endpointConnectionMock.addTransceiver( - audioTrack.mediaTrack, - eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), - match { it.size == 1 }, - withArg { - assertEquals("should be just 1 encoding", 1, it.size) - assertNull("without rid", it[0].rid) - } - ) + fun addsAudioTrack() = + runTest { + val audioTrack = LocalAudioTrack(mockk(relaxed = true)) + manager.getSdpOffer(emptyList(), emptyMap(), listOf(audioTrack)) + + verify(exactly = 1) { + endpointConnectionMock.addTransceiver( + audioTrack.mediaTrack, + eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), + match { it.size == 1 }, + withArg { + assertEquals("should be just 1 encoding", 1, it.size) + assertNull("without rid", it[0].rid) + } + ) + } } - } @OptIn(ExperimentalCoroutinesApi::class) @Test - fun addsVideoTrack() = runTest { - val mediaTrack: VideoTrack = mockk(relaxed = true) - - every { mediaTrack.kind() } returns "video" - - val videoTrack = LocalVideoTrack( - mediaTrack, - mockk(relaxed = true), - mockk(relaxed = true), - VideoParameters.presetFHD169 - ) - - manager.getSdpOffer(emptyList(), emptyMap(), listOf(videoTrack)) - - verify(exactly = 1) { - endpointConnectionMock.addTransceiver( - videoTrack.rtcTrack(), - eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), - match { it.size == 1 }, - withArg { - assertEquals("should be just 1 encoding", 1, it.size) - assertNull("without rid", it[0].rid) - } - ) + fun addsVideoTrack() = + runTest { + val mediaTrack: VideoTrack = mockk(relaxed = true) + + every { mediaTrack.kind() } returns "video" + + val videoTrack = + LocalVideoTrack( + mediaTrack, + mockk(relaxed = true), + mockk(relaxed = true), + VideoParameters.presetFHD169 + ) + + manager.getSdpOffer(emptyList(), emptyMap(), listOf(videoTrack)) + + verify(exactly = 1) { + endpointConnectionMock.addTransceiver( + videoTrack.rtcTrack(), + eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), + match { it.size == 1 }, + withArg { + assertEquals("should be just 1 encoding", 1, it.size) + 
assertNull("without rid", it[0].rid) + } + ) + } } - } @OptIn(ExperimentalCoroutinesApi::class) @Test - fun simulcastConfigIsSet() = runTest { - val videoParameters = VideoParameters.presetFHD169.copy( - simulcastConfig = SimulcastConfig( - true, - listOf(TrackEncoding.H, TrackEncoding.L) - ) - ) - - val mediaTrack: VideoTrack = mockk(relaxed = true) - - every { mediaTrack.kind() } returns "video" - - val videoTrack = LocalVideoTrack( - mediaTrack, - mockk(relaxed = true), - mockk(relaxed = true), - videoParameters - ) - - manager.getSdpOffer(emptyList(), emptyMap(), listOf(videoTrack)) - - verify(exactly = 1) { - endpointConnectionMock.addTransceiver( - videoTrack.rtcTrack(), - eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), - any(), - withArg { - assertEquals("Should be 3 encodings", 3, it.size) - - assertEquals("first encoding should have rid=l", "l", it[0].rid) - assertTrue("l encoding should be active", it[0].active) - assertEquals("l layer should be 4x smaller", it[0].scaleResolutionDownBy, 4.0) - - assertEquals("first encoding should have rid=m", "m", it[1].rid) - assertFalse("m encoding should not be active", it[1].active) - assertEquals("m layer should be 2x smaller", it[1].scaleResolutionDownBy, 2.0) - - assertEquals("third encoding should have rid=h", "h", it[2].rid) - assertTrue("h encoding should be active", it[2].active) - assertEquals("h layer should have original size", it[2].scaleResolutionDownBy, 1.0) - } - ) + fun simulcastConfigIsSet() = + runTest { + val videoParameters = + VideoParameters.presetFHD169.copy( + simulcastConfig = + SimulcastConfig( + true, + listOf(TrackEncoding.H, TrackEncoding.L) + ) + ) + + val mediaTrack: VideoTrack = mockk(relaxed = true) + + every { mediaTrack.kind() } returns "video" + + val videoTrack = + LocalVideoTrack( + mediaTrack, + mockk(relaxed = true), + mockk(relaxed = true), + videoParameters + ) + + manager.getSdpOffer(emptyList(), emptyMap(), listOf(videoTrack)) + + verify(exactly = 1) { + endpointConnectionMock.addTransceiver( + videoTrack.rtcTrack(), + eq(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY), + any(), + withArg { + assertEquals("Should be 3 encodings", 3, it.size) + + assertEquals("first encoding should have rid=l", "l", it[0].rid) + assertTrue("l encoding should be active", it[0].active) + assertEquals("l layer should be 4x smaller", it[0].scaleResolutionDownBy, 4.0) + + assertEquals("first encoding should have rid=m", "m", it[1].rid) + assertFalse("m encoding should not be active", it[1].active) + assertEquals("m layer should be 2x smaller", it[1].scaleResolutionDownBy, 2.0) + + assertEquals("third encoding should have rid=h", "h", it[2].rid) + assertTrue("h encoding should be active", it[2].active) + assertEquals("h layer should have original size", it[2].scaleResolutionDownBy, 1.0) + } + ) + } } - } @OptIn(ExperimentalCoroutinesApi::class) @Test - fun setTrackBandwidth() = runTest { - val h = Encoding("h", true, 1.0) - val m = Encoding("m", true, 2.0) - val l = Encoding("l", true, 4.0) - - every { endpointConnectionMock.senders } returns listOf( - mockk(relaxed = true) { - every { parameters } returns mockk(relaxed = true) { - every { track()?.id() } returns "dummy_track" - } - }, - mockk(relaxed = true) { - every { parameters } returns mockk(relaxed = true) { - every { track()?.id() } returns "track_id" - every { getEncodings() } returns listOf( - h, - m, - l - ) - } - } - ) - manager.getSdpOffer(emptyList(), emptyMap(), emptyList()) - assertNull("layers have no maxBitrateBps", h.maxBitrateBps) - 
manager.setTrackBandwidth("track_id", TrackBandwidthLimit.BandwidthLimit(1000)) - assertEquals("h layer has correct maxBitrateBps", 780190, h.maxBitrateBps) - assertEquals("m layer has correct maxBitrateBps", 195047, m.maxBitrateBps) - assertEquals("l layer has correct maxBitrateBps", 48761, l.maxBitrateBps) - } + fun setTrackBandwidth() = + runTest { + val h = Encoding("h", true, 1.0) + val m = Encoding("m", true, 2.0) + val l = Encoding("l", true, 4.0) + + every { endpointConnectionMock.senders } returns + listOf( + mockk(relaxed = true) { + every { parameters } returns + mockk(relaxed = true) { + every { track()?.id() } returns "dummy_track" + } + }, + mockk(relaxed = true) { + every { parameters } returns + mockk(relaxed = true) { + every { track()?.id() } returns "track_id" + every { getEncodings() } returns + listOf( + h, + m, + l + ) + } + } + ) + manager.getSdpOffer(emptyList(), emptyMap(), emptyList()) + assertNull("layers have no maxBitrateBps", h.maxBitrateBps) + manager.setTrackBandwidth("track_id", TrackBandwidthLimit.BandwidthLimit(1000)) + assertEquals("h layer has correct maxBitrateBps", 780190, h.maxBitrateBps) + assertEquals("m layer has correct maxBitrateBps", 195047, m.maxBitrateBps) + assertEquals("l layer has correct maxBitrateBps", 48761, l.maxBitrateBps) + } } diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/MainActivity.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/MainActivity.kt index 3295d3e..34131cd 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/MainActivity.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/MainActivity.kt @@ -78,9 +78,10 @@ class MainActivity : AppCompatActivity() { backgroundColor = Blue.darker(0.5f) ) { Column( - modifier = Modifier - .fillMaxSize() - .verticalScroll(scrollableState), + modifier = + Modifier + .fillMaxSize() + .verticalScroll(scrollableState), horizontalAlignment = Alignment.CenterHorizontally, verticalArrangement = Arrangement.Center ) { @@ -89,9 +90,10 @@ class MainActivity : AppCompatActivity() { Image( painter = painterResource(R.drawable.logo), contentDescription = "Application logo", - modifier = Modifier - .height(200.dp) - .fillMaxWidth(0.9f) + modifier = + Modifier + .height(200.dp) + .fillMaxWidth(0.9f) ) OutlinedTextField( @@ -104,9 +106,10 @@ class MainActivity : AppCompatActivity() { label = { Text("Room name") }, modifier = Modifier.focusOrder(first) { down = second }, keyboardOptions = KeyboardOptions(imeAction = ImeAction.Next), - keyboardActions = KeyboardActions( - onNext = { focusManager.moveFocus(FocusDirection.Down) } - ) + keyboardActions = + KeyboardActions( + onNext = { focusManager.moveFocus(FocusDirection.Down) } + ) ) Spacer(modifier = Modifier.height(16.dp)) OutlinedTextField( @@ -119,9 +122,10 @@ class MainActivity : AppCompatActivity() { label = { Text("Display name") }, modifier = Modifier.focusOrder(second) { down = third }, keyboardOptions = KeyboardOptions(imeAction = ImeAction.Next), - keyboardActions = KeyboardActions( - onNext = { focusManager.moveFocus(FocusDirection.Down) } - ) + keyboardActions = + KeyboardActions( + onNext = { focusManager.moveFocus(FocusDirection.Down) } + ) ) Spacer(modifier = Modifier.height(16.dp)) @@ -137,9 +141,10 @@ class MainActivity : AppCompatActivity() { enabled = !(roomName.value.text.isEmpty() || displayName.value.text.isEmpty()), colors = AppButtonColors(), shape = RoundedCornerShape(12.dp), - modifier = Modifier - .width(200.dp) - .focusOrder(third) + modifier = + Modifier + .width(200.dp) + 
.focusOrder(third) ) { Text("Join room") } @@ -151,12 +156,13 @@ class MainActivity : AppCompatActivity() { @OptIn(ExperimentalPermissionsApi::class) @Composable fun ConnectWithPermissions(content: @Composable () -> Unit) { - val multiplePermissionsState = rememberMultiplePermissionsState( - listOf( - RECORD_AUDIO, - CAMERA + val multiplePermissionsState = + rememberMultiplePermissionsState( + listOf( + RECORD_AUDIO, + CAMERA + ) ) - ) val alreadyRequested = remember { mutableStateOf(false) } @@ -164,21 +170,23 @@ class MainActivity : AppCompatActivity() { content() } else { Column( - modifier = Modifier - .fillMaxWidth() - .padding(10.dp), + modifier = + Modifier + .fillMaxWidth() + .padding(10.dp), horizontalAlignment = Alignment.CenterHorizontally ) { - val textToShow = when { - multiplePermissionsState.shouldShowRationale -> - "Application requires an access to a microphone and camera for it to work" + val textToShow = + when { + multiplePermissionsState.shouldShowRationale -> + "Application requires an access to a microphone and camera for it to work" - !multiplePermissionsState.shouldShowRationale && alreadyRequested.value -> - "You need to explicitly grant the access to the camera and microphone in system settings..." + !multiplePermissionsState.shouldShowRationale && alreadyRequested.value -> + "You need to explicitly grant the access to the camera and microphone in system settings..." - else -> - null - } + else -> + null + } Button( colors = AppButtonColors(), diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/RoomActivity.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/RoomActivity.kt index 57dc7d5..61a4f76 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/RoomActivity.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/RoomActivity.kt @@ -57,8 +57,9 @@ class RoomActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) - val (room, displayName) = intent.getParcelableExtra(ARGS) - ?: throw NullPointerException("Failed to decode intent's parcelable") + val (room, displayName) = + intent.getParcelableExtra(ARGS) + ?: throw NullPointerException("Failed to decode intent's parcelable") viewModel.connect(room, displayName) setContent { @@ -75,7 +76,11 @@ class RoomActivity : AppCompatActivity() { } @Composable - fun Content(viewModel: RoomViewModel, startScreencast: () -> Unit, onEnd: () -> Unit) { + fun Content( + viewModel: RoomViewModel, + startScreencast: () -> Unit, + onEnd: () -> Unit + ) { val participants = viewModel.participants.collectAsState() val primaryParticipant = viewModel.primaryParticipant.collectAsState() val errorMessage = viewModel.errorMessage.collectAsState() @@ -91,9 +96,10 @@ class RoomActivity : AppCompatActivity() { Box { Column( horizontalAlignment = Alignment.CenterHorizontally, - modifier = Modifier - .fillMaxWidth() - .fillMaxHeight() + modifier = + Modifier + .fillMaxWidth() + .fillMaxHeight() ) { errorMessage.value?.let { Text( @@ -125,15 +131,16 @@ class RoomActivity : AppCompatActivity() { viewModel.toggleVideoTrackEncoding(it) }, colors = AppButtonColors(), - modifier = Modifier.then( - if (videoSimulcastConfig.value.activeEncodings.contains(it)) { - Modifier.alpha(1f) - } else { - Modifier.alpha( - 0.5f - ) - } - ), + modifier = + Modifier.then( + if (videoSimulcastConfig.value.activeEncodings.contains(it)) { + Modifier.alpha(1f) + } else { + Modifier.alpha( + 0.5f + ) + } + ), shape = RoundedCornerShape(12.dp) ) { Text(it.name) @@ -150,20 +157,22 @@ class RoomActivity : AppCompatActivity() { } Column( - modifier = Modifier - .fillMaxWidth() - .height(248.dp) - .padding(10.dp) - .verticalScroll(scrollState), + modifier = + Modifier + .fillMaxWidth() + .height(248.dp) + .padding(10.dp) + .verticalScroll(scrollState), verticalArrangement = Arrangement.Center, horizontalAlignment = Alignment.CenterHorizontally ) { participants.value.chunked(2).forEach { Row( horizontalArrangement = Arrangement.Center, - modifier = Modifier - .fillMaxWidth() - .padding(8.dp) + modifier = + Modifier + .fillMaxWidth() + .padding(8.dp) ) { ParticipantCard( participant = it[0], @@ -238,13 +247,13 @@ fun ParticipantCard( "audio" -> { participant.audioTrack == null || ( participant.tracksMetadata.isNotEmpty() && isTrackNotActive(trackType) - ) + ) } "video" -> { participant.videoTrack == null || ( participant.tracksMetadata.isNotEmpty() && isTrackNotActive(trackType) - ) + ) } else -> { @@ -259,25 +268,27 @@ fun ParticipantCard( .size(20.dp) Box( - modifier = Modifier - .clickable( - indication = null, - interactionSource = remember { MutableInteractionSource() } - ) { - onClick?.invoke() - } - .clip(RoundedCornerShape(10.dp)) - .height(size.height.dp) - .width(size.width.dp) - .border(if (participant.vadStatus == VadStatus.SPEECH) 10.dp else 0.dp, Color.White) - .background(Blue.darker(0.7f)) + modifier = + Modifier + .clickable( + indication = null, + interactionSource = remember { MutableInteractionSource() } + ) { + onClick?.invoke() + } + .clip(RoundedCornerShape(10.dp)) + .height(size.height.dp) + .width(size.width.dp) + .border(if (participant.vadStatus == VadStatus.SPEECH) 10.dp else 0.dp, Color.White) + .background(Blue.darker(0.7f)) ) { if (shouldShowIcon("video")) { Box( - modifier = Modifier - .background(Blue.darker(0.7f)) - .fillMaxHeight() - .fillMaxWidth() + modifier = + Modifier + .background(Blue.darker(0.7f)) + .fillMaxHeight() + .fillMaxWidth() 
) { Row(modifier = Modifier.align(Alignment.Center)) { Icon( @@ -292,11 +303,12 @@ fun ParticipantCard( ParticipantVideoView( participant = participant, videoViewLayout = videoViewLayout, - modifier = Modifier - .align(Alignment.Center) - .fillMaxWidth() - .fillMaxHeight() - .background(Blue.darker(0.7f)) + modifier = + Modifier + .align(Alignment.Center) + .fillMaxWidth() + .fillMaxHeight() + .background(Blue.darker(0.7f)) ) } @@ -307,16 +319,18 @@ fun ParticipantCard( text = participant.displayName, maxLines = 1, overflow = TextOverflow.Ellipsis, - modifier = Modifier - .align(Alignment.BottomStart) - .width(size.width.dp - 20.dp) - .padding(20.dp) + modifier = + Modifier + .align(Alignment.BottomStart) + .width(size.width.dp - 20.dp) + .padding(20.dp) ) if (shouldShowIcon("audio")) { Row( - modifier = Modifier - .align(Alignment.TopStart) + modifier = + Modifier + .align(Alignment.TopStart) ) { Icon( painter = painterResource(R.drawable.ic_mic_off), @@ -330,7 +344,11 @@ fun ParticipantCard( } @Composable -fun ControlIcons(roomViewModel: RoomViewModel, startScreencast: () -> Unit, onEnd: () -> Unit) { +fun ControlIcons( + roomViewModel: RoomViewModel, + startScreencast: () -> Unit, + onEnd: () -> Unit +) { val iconModifier = Modifier .padding(10.dp) @@ -344,9 +362,10 @@ fun ControlIcons(roomViewModel: RoomViewModel, startScreencast: () -> Unit, onEn LazyRow( horizontalArrangement = Arrangement.Center, verticalAlignment = Alignment.CenterVertically, - modifier = Modifier - .fillMaxWidth() - .background(Blue.darker(0.7f)) + modifier = + Modifier + .fillMaxWidth() + .background(Blue.darker(0.7f)) ) { item { IconButton(onClick = { roomViewModel.toggleMicrophone() }) { @@ -360,9 +379,10 @@ fun ControlIcons(roomViewModel: RoomViewModel, startScreencast: () -> Unit, onEn if (isMicOn.value) { IconButton(onClick = { roomViewModel.toggleSoundDetection() }) { Icon( - painter = painterResource( - if (isSoundDetectionOn.value) R.drawable.ic_mic_on else R.drawable.ic_mic_off - ), + painter = + painterResource( + if (isSoundDetectionOn.value) R.drawable.ic_mic_on else R.drawable.ic_mic_off + ), contentDescription = "sound detection control", modifier = iconModifier, tint = if (isSoundDetected.value) Color.Blue else Color.DarkGray @@ -407,9 +427,10 @@ fun ControlIcons(roomViewModel: RoomViewModel, startScreencast: () -> Unit, onEn } }) { Icon( - painter = painterResource( - if (!isScreenCastOn.value) R.drawable.ic_screen_on else R.drawable.ic_screen_off - ), + painter = + painterResource( + if (!isScreenCastOn.value) R.drawable.ic_screen_on else R.drawable.ic_screen_off + ), contentDescription = "screen cast control", modifier = iconModifier, tint = Color.White diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/components/ParticipantVideoView.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/components/ParticipantVideoView.kt index 66d0591..c425313 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/components/ParticipantVideoView.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/components/ParticipantVideoView.kt @@ -21,11 +21,18 @@ enum class VideoViewLayout { } @Composable -fun ParticipantVideoView(participant: Participant, videoViewLayout: VideoViewLayout, modifier: Modifier = Modifier) { +fun ParticipantVideoView( + participant: Participant, + videoViewLayout: VideoViewLayout, + modifier: Modifier = Modifier +) { var activeVideoTrack by remember { mutableStateOf(null) } var view: VideoTextureViewRenderer? 
by remember { mutableStateOf(null) } - fun setupTrack(videoTrack: VideoTrack, view: VideoTextureViewRenderer) { + fun setupTrack( + videoTrack: VideoTrack, + view: VideoTextureViewRenderer + ) { if (activeVideoTrack == videoTrack) return activeVideoTrack?.removeRenderer(view) diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/models/Participant.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/models/Participant.kt index 4b89083..7b2c81f 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/models/Participant.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/models/Participant.kt @@ -14,11 +14,15 @@ data class Participant( val tracksMetadata: Map = emptyMap(), val vadStatus: VadStatus = VadStatus.SILENCE ) { - fun updateTrackMetadata(trackId: String?, metadata: Metadata): Participant { + fun updateTrackMetadata( + trackId: String?, + metadata: Metadata + ): Participant { return this.copy( - tracksMetadata = this.tracksMetadata + ( - (trackId ?: "") - to metadata + tracksMetadata = + this.tracksMetadata + ( + (trackId ?: "") + to metadata ) ) } diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/styles/colors.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/styles/colors.kt index c5b3a12..ab061da 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/styles/colors.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/styles/colors.kt @@ -6,7 +6,10 @@ import androidx.compose.ui.graphics.colorspace.ColorSpaces val Primary = Color(0xff1e3d80) val Blue = Color(0xff447bfe) -fun Color.mix(with: Color, amount: Float): Color { +fun Color.mix( + with: Color, + amount: Float +): Color { val red1 = this.red val red2 = with.red val green1 = this.green diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/styles/text_field.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/styles/text_field.kt index 440be4d..18bf9de 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/styles/text_field.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/styles/text_field.kt @@ -15,28 +15,22 @@ fun AppTextFieldColors(): TextFieldColors { return DefaultTextFieldColors( textColor = Color.White.darker(0.1f), disabledTextColor = Color.White.darker(0.4f), - cursorColor = Color.White, errorCursorColor = Color.Red, - focusedIndicatorColor = Color.White, unfocusedIndicatorColor = Color.White.darker(0.1f), errorIndicatorColor = Color.Red, disabledIndicatorColor = Color.Gray.darker(0.4f), - leadingIconColor = Color.White, disabledLeadingIconColor = Color.Gray, errorLeadingIconColor = Color.Red, - trailingIconColor = Color.White, disabledTrailingIconColor = Color.Gray, errorTrailingIconColor = Color.Red, - focusedLabelColor = Color.White, unfocusedLabelColor = Color.White.darker(0.1f), disabledLabelColor = Color.Gray.darker(0.4f), errorLabelColor = Color.Red, - backgroundColor = Blue.darker(0.4f), placeholderColor = Color.White.darker(0.3f), disabledPlaceholderColor = Color.Gray.darker(0.5f) @@ -67,7 +61,10 @@ internal data class DefaultTextFieldColors( val disabledPlaceholderColor: Color ) : TextFieldColors { @Composable - override fun leadingIconColor(enabled: Boolean, isError: Boolean): State { + override fun leadingIconColor( + enabled: Boolean, + isError: Boolean + ): State { return rememberUpdatedState( when { !enabled -> disabledLeadingIconColor @@ -78,7 +75,10 @@ internal data class DefaultTextFieldColors( } @Composable - override fun trailingIconColor(enabled: Boolean, isError: Boolean): State { + override fun 
trailingIconColor( + enabled: Boolean, + isError: Boolean + ): State { return rememberUpdatedState( when { !enabled -> disabledTrailingIconColor @@ -96,12 +96,13 @@ internal data class DefaultTextFieldColors( ): State { val focused by interactionSource.collectIsFocusedAsState() - val targetValue = when { - !enabled -> disabledIndicatorColor - isError -> errorIndicatorColor - focused -> focusedIndicatorColor - else -> unfocusedIndicatorColor - } + val targetValue = + when { + !enabled -> disabledIndicatorColor + isError -> errorIndicatorColor + focused -> focusedIndicatorColor + else -> unfocusedIndicatorColor + } return if (enabled) { animateColorAsState(targetValue, tween(durationMillis = 150)) } else { @@ -120,15 +121,20 @@ internal data class DefaultTextFieldColors( } @Composable - override fun labelColor(enabled: Boolean, error: Boolean, interactionSource: InteractionSource): State { + override fun labelColor( + enabled: Boolean, + error: Boolean, + interactionSource: InteractionSource + ): State { val focused by interactionSource.collectIsFocusedAsState() - val targetValue = when { - !enabled -> disabledLabelColor - error -> errorLabelColor - focused -> focusedLabelColor - else -> unfocusedLabelColor - } + val targetValue = + when { + !enabled -> disabledLabelColor + error -> errorLabelColor + focused -> focusedLabelColor + else -> unfocusedLabelColor + } return rememberUpdatedState(targetValue) } diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Shape.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Shape.kt index 06a2d48..a733010 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Shape.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Shape.kt @@ -4,8 +4,9 @@ import androidx.compose.foundation.shape.RoundedCornerShape import androidx.compose.material.Shapes import androidx.compose.ui.unit.dp -val Shapes = Shapes( - small = RoundedCornerShape(4.dp), - medium = RoundedCornerShape(4.dp), - large = RoundedCornerShape(0.dp) -) +val Shapes = + Shapes( + small = RoundedCornerShape(4.dp), + medium = RoundedCornerShape(4.dp), + large = RoundedCornerShape(0.dp) + ) diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Theme.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Theme.kt index 66b81e7..aed7672 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Theme.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Theme.kt @@ -6,17 +6,18 @@ import androidx.compose.material.darkColors import androidx.compose.material.lightColors import androidx.compose.runtime.Composable -private val DarkColorPalette = darkColors( - primary = Purple200, - primaryVariant = Purple700, - secondary = Teal200 -) - -private val LightColorPalette = lightColors( - primary = Purple500, - primaryVariant = Purple700, - secondary = Teal200 +private val DarkColorPalette = + darkColors( + primary = Purple200, + primaryVariant = Purple700, + secondary = Teal200 + ) +private val LightColorPalette = + lightColors( + primary = Purple500, + primaryVariant = Purple700, + secondary = Teal200 /* Other default colors to override background = Color.White, surface = Color.White, @@ -25,15 +26,19 @@ private val LightColorPalette = lightColors( onBackground = Color.Black, onSurface = Color.Black, */ -) + ) @Composable -fun MembraneVideoroomDemoTheme(darkTheme: Boolean = isSystemInDarkTheme(), content: @Composable () -> Unit) { - val colors = if (darkTheme) { - DarkColorPalette - } else { - 
LightColorPalette - } +fun MembraneVideoroomDemoTheme( + darkTheme: Boolean = isSystemInDarkTheme(), + content: @Composable () -> Unit +) { + val colors = + if (darkTheme) { + DarkColorPalette + } else { + LightColorPalette + } MaterialTheme( colors = colors, diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Type.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Type.kt index a0bf1f8..a3c132e 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Type.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/ui/theme/Type.kt @@ -7,12 +7,14 @@ import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.unit.sp // Set of Material typography styles to start with -val Typography = Typography( - body1 = TextStyle( - fontFamily = FontFamily.Default, - fontWeight = FontWeight.Normal, - fontSize = 16.sp - ) +val Typography = + Typography( + body1 = + TextStyle( + fontFamily = FontFamily.Default, + fontWeight = FontWeight.Normal, + fontSize = 16.sp + ) /* Other default text styles to override button = TextStyle( fontFamily = FontFamily.Default, @@ -25,4 +27,4 @@ val Typography = Typography( fontSize = 12.sp ) */ -) + ) diff --git a/app/src/main/java/com/dscout/membranevideoroomdemo/viewmodels/RoomViewModel.kt b/app/src/main/java/com/dscout/membranevideoroomdemo/viewmodels/RoomViewModel.kt index ef99db0..470223d 100644 --- a/app/src/main/java/com/dscout/membranevideoroomdemo/viewmodels/RoomViewModel.kt +++ b/app/src/main/java/com/dscout/membranevideoroomdemo/viewmodels/RoomViewModel.kt @@ -46,15 +46,16 @@ class RoomViewModel( val errorMessage = MutableStateFlow(null) val soundVolumedB = MutableStateFlow(0) - val soundDetectionListener = object : OnSoundDetectedListener { - override fun onSoundDetected(isDetected: Boolean) { - isSoundDetected.value = isDetected - } + val soundDetectionListener = + object : OnSoundDetectedListener { + override fun onSoundDetected(isDetected: Boolean) { + isSoundDetected.value = isDetected + } - override fun onSoundVolumeChanged(volume: Int) { - soundVolumedB.value = volume + override fun onSoundVolumeChanged(volume: Int) { + soundVolumedB.value = volume + } } - } private var localScreencastId: String? 
= null @@ -63,35 +64,42 @@ class RoomViewModel( private lateinit var transport: PhoenixTransport - val videoSimulcastConfig = MutableStateFlow( - SimulcastConfig( - enabled = true, - activeEncodings = listOf( - TrackEncoding.L, - TrackEncoding.M, - TrackEncoding.H + val videoSimulcastConfig = + MutableStateFlow( + SimulcastConfig( + enabled = true, + activeEncodings = + listOf( + TrackEncoding.L, + TrackEncoding.M, + TrackEncoding.H + ) ) ) - ) - val screencastSimulcastConfig = MutableStateFlow( - SimulcastConfig( - enabled = false + val screencastSimulcastConfig = + MutableStateFlow( + SimulcastConfig( + enabled = false + ) ) - ) - fun connect(roomName: String, displayName: String) { + fun connect( + roomName: String, + displayName: String + ) { viewModelScope.launch { localDisplayName = displayName // disconnect from the current view room.value?.disconnect() - val transport = PhoenixTransport( - url, - "room:$roomName", - Dispatchers.IO, - params, - mapOf("isSimulcastOn" to true) - ) + val transport = + PhoenixTransport( + url, + "room:$roomName", + Dispatchers.IO, + params, + mapOf("isSimulcastOn" to true) + ) try { transport.connect(this@RoomViewModel) @@ -104,15 +112,18 @@ class RoomViewModel( this@RoomViewModel.transport = transport - room.value = MembraneRTC.create( - appContext = getApplication(), - options = CreateOptions( - encoderOptions = EncoderOptions( - encoderType = EncoderType.SOFTWARE - ) - ), - listener = this@RoomViewModel - ) + room.value = + MembraneRTC.create( + appContext = getApplication(), + options = + CreateOptions( + encoderOptions = + EncoderOptions( + encoderType = EncoderType.SOFTWARE + ) + ), + listener = this@RoomViewModel + ) room.value?.connect(mapOf("displayName" to (localDisplayName ?: ""))) } @@ -134,9 +145,10 @@ class RoomViewModel( if (localVideoTrack?.id() != primaryParticipantTrackId && localScreencastTrack?.id() != primaryParticipantTrackId ) { - val globalId = globalToLocalTrackId.filterValues { it1 -> - it1 == primaryParticipantTrackId - }.keys + val globalId = + globalToLocalTrackId.filterValues { it1 -> + it1 == primaryParticipantTrackId + }.keys if (globalId.isNotEmpty()) { room.value?.setTargetTrackEncoding(globalId.first(), TrackEncoding.L) } @@ -150,9 +162,10 @@ class RoomViewModel( } } - participants.value = candidates.filter { candidate -> - candidate.id != it.id - }.toList() + participants.value = + candidates.filter { candidate -> + candidate.id != it.id + }.toList() } } @@ -181,10 +194,11 @@ class RoomViewModel( val p = mutableParticipants[localEndpointId] if (p != null) { - mutableParticipants[localEndpointId] = p.updateTrackMetadata( - p.audioTrack?.id(), - mapOf("active" to isMicrophoneOn.value) - ) + mutableParticipants[localEndpointId] = + p.updateTrackMetadata( + p.audioTrack?.id(), + mapOf("active" to isMicrophoneOn.value) + ) } emitParticipants() @@ -212,10 +226,11 @@ class RoomViewModel( val p = mutableParticipants[localEndpointId] if (p != null) { - mutableParticipants[localEndpointId] = p.updateTrackMetadata( - p.videoTrack?.id(), - mapOf("active" to isCameraOn.value) - ) + mutableParticipants[localEndpointId] = + p.updateTrackMetadata( + p.videoTrack?.id(), + mapOf("active" to isCameraOn.value) + ) } emitParticipants() @@ -233,62 +248,71 @@ class RoomViewModel( private fun setupTracks() { room.value?.let { - localAudioTrack = it.createAudioTrack( - mapOf( - "user_id" to (localDisplayName ?: ""), - "active" to true, - "type" to "audio" + localAudioTrack = + it.createAudioTrack( + mapOf( + "user_id" to (localDisplayName ?: 
""), + "active" to true, + "type" to "audio" + ) ) - ) var videoParameters = VideoParameters.presetHD169 - videoParameters = videoParameters.copy( - dimensions = videoParameters.dimensions, - simulcastConfig = videoSimulcastConfig.value, - maxBitrate = TrackBandwidthLimit.SimulcastBandwidthLimit( - mapOf( - "l" to TrackBandwidthLimit.BandwidthLimit(150), - "m" to TrackBandwidthLimit.BandwidthLimit(500), - "h" to TrackBandwidthLimit.BandwidthLimit(1500) - ) + videoParameters = + videoParameters.copy( + dimensions = videoParameters.dimensions, + simulcastConfig = videoSimulcastConfig.value, + maxBitrate = + TrackBandwidthLimit.SimulcastBandwidthLimit( + mapOf( + "l" to TrackBandwidthLimit.BandwidthLimit(150), + "m" to TrackBandwidthLimit.BandwidthLimit(500), + "h" to TrackBandwidthLimit.BandwidthLimit(1500) + ) + ) ) - ) - localVideoTrack = it.createVideoTrack( - videoParameters, - mapOf( - "user_id" to (localDisplayName ?: ""), - "active" to true, - "type" to "camera" + localVideoTrack = + it.createVideoTrack( + videoParameters, + mapOf( + "user_id" to (localDisplayName ?: ""), + "active" to true, + "type" to "camera" + ) ) - ) isCameraOn.value = localVideoTrack?.enabled() ?: false isMicrophoneOn.value = localAudioTrack?.enabled() ?: false val participant = Participant(localEndpointId, "Me", localVideoTrack, localAudioTrack) - mutableParticipants[localEndpointId] = participant.updateTrackMetadata( - participant.audioTrack?.id(), - mapOf("active" to isMicrophoneOn.value) - ).updateTrackMetadata( - participant.videoTrack?.id(), - mapOf("active" to isCameraOn.value) - ) + mutableParticipants[localEndpointId] = + participant.updateTrackMetadata( + participant.audioTrack?.id(), + mapOf("active" to isMicrophoneOn.value) + ).updateTrackMetadata( + participant.videoTrack?.id(), + mapOf("active" to isCameraOn.value) + ) } } // MembraneRTCListener callbacks - override fun onConnected(endpointID: String, otherEndpoints: List) { + override fun onConnected( + endpointID: String, + otherEndpoints: List + ) { Timber.i("Successfully join the room") otherEndpoints.forEach { - mutableParticipants[it.id] = Participant( - it.id, - it.metadata["displayName"] as? String ?: "UNKNOWN", - null, - null - ) + mutableParticipants[it.id] = + Participant( + it.id, + it.metadata["displayName"] as? 
String ?: "UNKNOWN", + null, + null + ) } setupTracks() @@ -307,55 +331,58 @@ class RoomViewModel( override fun onTrackReady(ctx: TrackContext) { val participant = mutableParticipants[ctx.endpoint.id] ?: return - val (id, newParticipant) = when (ctx.track) { - is RemoteVideoTrack -> { - globalToLocalTrackId[ctx.trackId] = (ctx.track as RemoteVideoTrack).id() - - if (ctx.metadata["type"] == "screensharing") { - Pair( - ctx.trackId, - participant.copy( - id = ctx.trackId, - displayName = "${participant.displayName} (screencast)", - videoTrack = ctx.track as RemoteVideoTrack + val (id, newParticipant) = + when (ctx.track) { + is RemoteVideoTrack -> { + globalToLocalTrackId[ctx.trackId] = (ctx.track as RemoteVideoTrack).id() + + if (ctx.metadata["type"] == "screensharing") { + Pair( + ctx.trackId, + participant.copy( + id = ctx.trackId, + displayName = "${participant.displayName} (screencast)", + videoTrack = ctx.track as RemoteVideoTrack + ) ) - ) - } else { - val p = participant.copy(videoTrack = ctx.track as RemoteVideoTrack) + } else { + val p = participant.copy(videoTrack = ctx.track as RemoteVideoTrack) + Pair( + ctx.endpoint.id, + p.copy( + tracksMetadata = + p.tracksMetadata + ( + ( + globalToLocalTrackId[ctx.trackId] + ?: "" + ) to ctx.metadata + ) + ) + ) + } + } + + is RemoteAudioTrack -> { + globalToLocalTrackId[ctx.trackId] = (ctx.track as RemoteAudioTrack).id() + val p = participant.copy(audioTrack = ctx.track as RemoteAudioTrack) Pair( ctx.endpoint.id, p.copy( - tracksMetadata = p.tracksMetadata + ( - ( - globalToLocalTrackId[ctx.trackId] - ?: "" + tracksMetadata = + p.tracksMetadata + ( + ( + globalToLocalTrackId[ctx.trackId] + ?: "" ) to ctx.metadata ) ) ) } - } - is RemoteAudioTrack -> { - globalToLocalTrackId[ctx.trackId] = (ctx.track as RemoteAudioTrack).id() - val p = participant.copy(audioTrack = ctx.track as RemoteAudioTrack) - Pair( - ctx.endpoint.id, - p.copy( - tracksMetadata = p.tracksMetadata + ( - ( - globalToLocalTrackId[ctx.trackId] - ?: "" - ) to ctx.metadata - ) - ) - ) + else -> + throw IllegalArgumentException("invalid type of incoming remote track") } - else -> - throw IllegalArgumentException("invalid type of incoming remote track") - } - mutableParticipants[id] = newParticipant emitParticipants() @@ -383,25 +410,27 @@ class RoomViewModel( emitParticipants() } else { - val participant = mutableParticipants[ctx.endpoint.id] - ?: throw IllegalArgumentException("No participant with id ${ctx.endpoint.id}") + val participant = + mutableParticipants[ctx.endpoint.id] + ?: throw IllegalArgumentException("No participant with id ${ctx.endpoint.id}") val localTrackId = globalToLocalTrackId[ctx.trackId] val audioTrackId = participant.audioTrack?.id() val videoTrackId = participant.videoTrack?.id() - val newParticipant = when { - localTrackId == videoTrackId -> - participant.copy(videoTrack = null) + val newParticipant = + when { + localTrackId == videoTrackId -> + participant.copy(videoTrack = null) - localTrackId == audioTrackId -> - participant.copy(audioTrack = null) + localTrackId == audioTrackId -> + participant.copy(audioTrack = null) - else -> - throw IllegalArgumentException( - "Track ${ctx.trackId} has not been found for given endpoint ${ctx.endpoint.id}" - ) - } + else -> + throw IllegalArgumentException( + "Track ${ctx.trackId} has not been found for given endpoint ${ctx.endpoint.id}" + ) + } globalToLocalTrackId.remove(ctx.trackId) @@ -418,15 +447,17 @@ class RoomViewModel( if (p != null) { // Updates metadata of given track if (ctx.metadata["type"] == 
"camera") { - mutableParticipants[ctx.endpoint.id] = p.updateTrackMetadata( - p.videoTrack?.id(), - ctx.metadata - ) + mutableParticipants[ctx.endpoint.id] = + p.updateTrackMetadata( + p.videoTrack?.id(), + ctx.metadata + ) } else { - mutableParticipants[ctx.endpoint.id] = p.updateTrackMetadata( - p.audioTrack?.id(), - ctx.metadata - ) + mutableParticipants[ctx.endpoint.id] = + p.updateTrackMetadata( + p.audioTrack?.id(), + ctx.metadata + ) } } @@ -435,10 +466,11 @@ class RoomViewModel( } override fun onEndpointAdded(endpoint: Endpoint) { - mutableParticipants[endpoint.id] = Participant( - id = endpoint.id, - displayName = endpoint.metadata["displayName"] as? String ?: "UNKNOWN" - ) + mutableParticipants[endpoint.id] = + Participant( + id = endpoint.id, + displayName = endpoint.metadata["displayName"] as? String ?: "UNKNOWN" + ) emitParticipants() Timber.i("Endpoint $endpoint has been added") @@ -464,26 +496,29 @@ class RoomViewModel( var videoParameters = VideoParameters.presetScreenShareHD15 val dimensions = videoParameters.dimensions.flip() - videoParameters = videoParameters.copy( - dimensions = dimensions, - simulcastConfig = screencastSimulcastConfig.value - ) + videoParameters = + videoParameters.copy( + dimensions = dimensions, + simulcastConfig = screencastSimulcastConfig.value + ) - localScreencastTrack = room.value?.createScreencastTrack( - mediaProjectionPermission, - videoParameters, - mapOf( - "type" to "screensharing", - "user_id" to (localDisplayName ?: "") + localScreencastTrack = + room.value?.createScreencastTrack( + mediaProjectionPermission, + videoParameters, + mapOf( + "type" to "screensharing", + "user_id" to (localDisplayName ?: "") + ) ) - ) localScreencastTrack?.let { - mutableParticipants[localScreencastId!!] = Participant( - id = localScreencastId!!, - displayName = "Me (screen cast)", - videoTrack = it - ) + mutableParticipants[localScreencastId!!] = + Participant( + id = localScreencastId!!, + displayName = "Me (screen cast)", + videoTrack = it + ) emitParticipants() } } @@ -510,10 +545,11 @@ class RoomViewModel( ) { if (simulcastConfig.value.activeEncodings.contains(encoding)) { room.value?.disableTrackEncoding(trackId, encoding) - simulcastConfig.value = SimulcastConfig( - true, - simulcastConfig.value.activeEncodings.filter { it != encoding } - ) + simulcastConfig.value = + SimulcastConfig( + true, + simulcastConfig.value.activeEncodings.filter { it != encoding } + ) } else { room.value?.enableTrackEncoding(trackId, encoding) simulcastConfig.value = SimulcastConfig(true, simulcastConfig.value.activeEncodings.plus(encoding))