
Commit

Refactor websockets and other MembraneWebRTC remnants (#70)
## Description

Refactor the websocket layer and the remaining MembraneWebRTC code so that the SDK talks directly to the Fishjam server instead of relying on the old Membrane code.
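
For reference, a minimal sketch of the reworked connection flow from the JavaScript side. Only `joinRoom` is actually visible in this diff's TypeScript changes; `connect`, `leaveRoom`, and `cleanUp` are assumed here to be re-exported by the package under the same names as the native functions registered below.

```ts
// Hedged sketch of the new session lifecycle (names other than joinRoom are assumed).
import {
  connect,
  joinRoom,
  leaveRoom,
  cleanUp,
} from '@fishjam-dev/react-native-client';

async function runSession(url: string, peerToken: string) {
  // connect() now creates the client and opens the websocket to the Fishjam
  // server in one call; there is no separate create()/receiveMediaEvent() step.
  await connect(url, peerToken);
  await joinRoom({ name: 'RN mobile' });

  // ... publish camera / microphone tracks here ...

  await leaveRoom();
  await cleanUp();
}
```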
 
## Motivation and Context

Code cleanup

## How has this been tested?

Please describe in detail how you tested your changes. Include details of your
testing environment and devices (e.g. iPhone XYZ, iOS X.X.X & Samsung XYZ, Android X.X.X).

## Types of changes

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)

## Checklist:

- [ ] My code follows the code style of this project.
- [ ] My change requires a change to the documentation.
- [ ] I have updated the documentation accordingly.

## Screenshots (if appropriate)
graszka22 authored Jun 25, 2024
1 parent 3606a39 commit dc6908c
Showing 33 changed files with 572 additions and 776 deletions.


@@ -71,9 +71,9 @@ class ScreencastOptions : Record {
val maxBandwidthInt: Int = 0
}

class MembraneWebRTCModule : Module() {
class RNFishjamClientModule : Module() {
override fun definition() = ModuleDefinition {
Name("MembraneWebRTC")
Name("RNFishjamClient")

Events(
"IsCameraOn",
@@ -86,203 +86,204 @@ class MembraneWebRTCModule : Module() {
"BandwidthEstimation"
)

val membraneWebRTC = MembraneWebRTC { name: String, data: Map<String, Any?> ->
val rnFishjamClient = RNFishjamClient { name: String, data: Map<String, Any?> ->
sendEvent(name, data)
}

OnCreate {
membraneWebRTC.onModuleCreate(appContext)
rnFishjamClient.onModuleCreate(appContext)
}

OnDestroy {
membraneWebRTC.onModuleDestroy()
rnFishjamClient.onModuleDestroy()
}

OnActivityDestroys {
membraneWebRTC.disconnect()
rnFishjamClient.cleanUp()
}

OnActivityResult { _, result ->
membraneWebRTC.onActivityResult(result.requestCode, result.resultCode, result.data)
rnFishjamClient.onActivityResult(result.requestCode, result.resultCode, result.data)
}

AsyncFunction("create") Coroutine ({ ->
withContext(Dispatchers.Main) {
membraneWebRTC.create()
AsyncFunction("connect") { url: String, peerToken: String, promise: Promise ->
CoroutineScope(Dispatchers.Main).launch {
rnFishjamClient.create()
rnFishjamClient.connect(url, peerToken, promise)
}
})
}

AsyncFunction("receiveMediaEvent") Coroutine { data: String ->
withContext(Dispatchers.Main) {
membraneWebRTC.receiveMediaEvent(data)
AsyncFunction("joinRoom") { peerMetadata: Map<String, Any>, promise: Promise ->
CoroutineScope(Dispatchers.Main).launch {
rnFishjamClient.joinRoom(peerMetadata, promise)
}
}

AsyncFunction("connect") { endpointMetadata: Map<String, Any>, promise: Promise ->
AsyncFunction("leaveRoom") { ->
CoroutineScope(Dispatchers.Main).launch {
membraneWebRTC.connect(endpointMetadata, promise)
rnFishjamClient.leaveRoom()
}
}

AsyncFunction("disconnect") Coroutine { ->
AsyncFunction("cleanUp") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.disconnect()
rnFishjamClient.cleanUp()
}
}


AsyncFunction("startCamera") Coroutine { config: CameraConfig ->
withContext(Dispatchers.Main) {
membraneWebRTC.startCamera(config)
rnFishjamClient.startCamera(config)
}
}

AsyncFunction("startMicrophone") Coroutine { config: MicrophoneConfig ->
withContext(Dispatchers.Main) {
membraneWebRTC.startMicrophone(config)
rnFishjamClient.startMicrophone(config)
}
}

Property("isMicrophoneOn") {
return@Property membraneWebRTC.isMicrophoneOn
return@Property rnFishjamClient.isMicrophoneOn
}

AsyncFunction("toggleMicrophone") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.toggleMicrophone()
rnFishjamClient.toggleMicrophone()
}
}

Property("isCameraOn") {
return@Property membraneWebRTC.isCameraOn
return@Property rnFishjamClient.isCameraOn
}

AsyncFunction("toggleCamera") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.toggleCamera()
rnFishjamClient.toggleCamera()
}
}

AsyncFunction("flipCamera") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.flipCamera()
rnFishjamClient.flipCamera()
}
}

AsyncFunction("switchCamera") Coroutine { captureDeviceId: String ->
withContext(Dispatchers.Main) {
membraneWebRTC.switchCamera(captureDeviceId)
rnFishjamClient.switchCamera(captureDeviceId)
}
}

AsyncFunction("getCaptureDevices") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.getCaptureDevices()
rnFishjamClient.getCaptureDevices()
}
}

AsyncFunction("toggleScreencast") { screencastOptions: ScreencastOptions, promise: Promise ->
CoroutineScope(Dispatchers.Main).launch {
membraneWebRTC.toggleScreencast(screencastOptions, promise)
rnFishjamClient.toggleScreencast(screencastOptions, promise)
}
}

Property("isScreencastOn") {
return@Property membraneWebRTC.isScreencastOn
return@Property rnFishjamClient.isScreencastOn
}

AsyncFunction("getEndpoints") Coroutine { ->
withContext(Dispatchers.Main) {
membraneWebRTC.getEndpoints()
rnFishjamClient.getEndpoints()
}
}

AsyncFunction("updateEndpointMetadata") Coroutine { metadata: Map<String, Any> ->
withContext(Dispatchers.Main) {
membraneWebRTC.updateEndpointMetadata(metadata)
rnFishjamClient.updateEndpointMetadata(metadata)
}
}

AsyncFunction("updateVideoTrackMetadata") Coroutine { metadata: Map<String, Any> ->
withContext(Dispatchers.Main) {
membraneWebRTC.updateLocalVideoTrackMetadata(metadata)
rnFishjamClient.updateLocalVideoTrackMetadata(metadata)
}
}

AsyncFunction("updateAudioTrackMetadata") Coroutine { metadata: Map<String, Any> ->
withContext(Dispatchers.Main) {
membraneWebRTC.updateLocalAudioTrackMetadata(metadata)
rnFishjamClient.updateLocalAudioTrackMetadata(metadata)
}
}

AsyncFunction("updateScreencastTrackMetadata") Coroutine { metadata: Map<String, Any> ->
withContext(Dispatchers.Main) {
membraneWebRTC.updateLocalScreencastTrackMetadata(metadata)
rnFishjamClient.updateLocalScreencastTrackMetadata(metadata)
}
}

AsyncFunction("setOutputAudioDevice") { audioDevice: String ->
membraneWebRTC.setOutputAudioDevice(audioDevice)
rnFishjamClient.setOutputAudioDevice(audioDevice)
}

AsyncFunction("startAudioSwitcher") {
membraneWebRTC.startAudioSwitcher()
rnFishjamClient.startAudioSwitcher()
}

AsyncFunction("stopAudioSwitcher") {
membraneWebRTC.stopAudioSwitcher()
rnFishjamClient.stopAudioSwitcher()
}

AsyncFunction("toggleScreencastTrackEncoding") Coroutine { encoding: String ->
withContext(Dispatchers.Main) {
membraneWebRTC.toggleScreencastTrackEncoding(encoding)
rnFishjamClient.toggleScreencastTrackEncoding(encoding)
}
}

AsyncFunction("setScreencastTrackBandwidth") Coroutine { bandwidth: Int ->
withContext(Dispatchers.Main) {
membraneWebRTC.setScreencastTrackBandwidth(bandwidth)
rnFishjamClient.setScreencastTrackBandwidth(bandwidth)
}
}

AsyncFunction("setScreencastTrackEncodingBandwidth") Coroutine { encoding: String, bandwidth: Int ->
withContext(Dispatchers.Main) {
membraneWebRTC.setScreencastTrackEncodingBandwidth(encoding, bandwidth)
rnFishjamClient.setScreencastTrackEncodingBandwidth(encoding, bandwidth)
}
}

AsyncFunction("setTargetTrackEncoding") Coroutine { trackId: String, encoding: String ->
withContext(Dispatchers.Main) {
membraneWebRTC.setTargetTrackEncoding(trackId, encoding)
rnFishjamClient.setTargetTrackEncoding(trackId, encoding)
}
}

AsyncFunction("toggleVideoTrackEncoding") Coroutine { encoding: String ->
withContext(Dispatchers.Main) {
membraneWebRTC.toggleVideoTrackEncoding(encoding)
rnFishjamClient.toggleVideoTrackEncoding(encoding)
}
}

AsyncFunction("setVideoTrackEncodingBandwidth") Coroutine { encoding: String, bandwidth: Int ->
withContext(Dispatchers.Main) {
membraneWebRTC.setVideoTrackEncodingBandwidth(encoding, bandwidth)
rnFishjamClient.setVideoTrackEncodingBandwidth(encoding, bandwidth)
}
}

AsyncFunction("setVideoTrackBandwidth") Coroutine { bandwidth: Int ->
withContext(Dispatchers.Main) {
membraneWebRTC.setVideoTrackBandwidth(bandwidth)
rnFishjamClient.setVideoTrackBandwidth(bandwidth)
}
}

AsyncFunction("changeWebRTCLoggingSeverity") Coroutine { severity: String ->
CoroutineScope(Dispatchers.Main).launch {
membraneWebRTC.changeWebRTCLoggingSeverity(severity)
rnFishjamClient.changeWebRTCLoggingSeverity(severity)
}
}

AsyncFunction("getStatistics") { ->
membraneWebRTC.getStatistics()
rnFishjamClient.getStatistics()
}
}
}
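
Because the module is now registered under Name("RNFishjamClient"), the JavaScript wrapper has to resolve it under the new name. The wrapper itself is not part of this diff; the following is a minimal sketch of what the lookup could look like with expo-modules-core, with all JS-side names assumed.

```ts
import { requireNativeModule } from 'expo-modules-core';

// Hypothetical wrapper: with the old code this lookup would have been
// requireNativeModule('MembraneWebRTC').
const RNFishjamClient = requireNativeModule('RNFishjamClient');

// connect() creates the native client and connects to the Fishjam server.
export function connect(url: string, peerToken: string): Promise<void> {
  return RNFishjamClient.connect(url, peerToken);
}

// joinRoom() joins with the given peer metadata once connected.
export function joinRoom(peerMetadata: Record<string, unknown>): Promise<void> {
  return RNFishjamClient.joinRoom(peerMetadata);
}
```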
@@ -11,14 +11,14 @@ import org.membraneframework.rtc.ui.VideoTextureViewRenderer
import org.webrtc.RendererCommon

class VideoRendererView(context: Context, appContext: AppContext) : ExpoView(context, appContext),
MembraneWebRTC.OnTrackUpdateListener {
RNFishjamClient.OnTrackUpdateListener {
var isInitialized = false
var activeVideoTrack: VideoTrack? = null
var trackId: String? = null

private val videoView = VideoTextureViewRenderer(context).also {
addView(it)
MembraneWebRTC.onTracksUpdateListeners.add(this)
RNFishjamClient.onTracksUpdateListeners.add(this)
}

private fun setupTrack(videoTrack: VideoTrack) {
@@ -31,7 +31,7 @@ class VideoRendererView(context: Context, appContext: AppContext) : ExpoView(con

private fun update() {
CoroutineScope(Dispatchers.Main).launch {
val endpoint = MembraneWebRTC.endpoints.values.firstOrNull { it.videoTracks[trackId] != null }
val endpoint = RNFishjamClient.endpoints.values.firstOrNull { it.videoTracks[trackId] != null }
val videoTrack = endpoint?.videoTracks?.get(trackId) ?: return@launch
if(!isInitialized) {
isInitialized = true
@@ -49,7 +49,7 @@ class VideoRendererView(context: Context, appContext: AppContext) : ExpoView(con
fun dispose() {
activeVideoTrack?.removeRenderer(videoView)
videoView.release()
MembraneWebRTC.onTracksUpdateListeners.remove(this)
RNFishjamClient.onTracksUpdateListeners.remove(this)
}

override fun onTracksUpdate() {
@@ -70,4 +70,4 @@ class VideoRendererView(context: Context, appContext: AppContext) : ExpoView(con
videoView.setMirror(mirrorVideo)
}

}
}
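
On the consumer side the view is still addressed by trackId; only the static listener list it registers with changed (RNFishjamClient.onTracksUpdateListeners instead of MembraneWebRTC.onTracksUpdateListeners). A hedged usage sketch, assuming the package exports a VideoRendererView component that wraps this native view (that export is not shown in this diff):

```tsx
import React from 'react';
// Assumed export wrapping the native VideoRendererView.
import { VideoRendererView } from '@fishjam-dev/react-native-client';

// Renders one remote video track; the native view finds the track in
// RNFishjamClient.endpoints and refreshes itself on onTracksUpdate().
export function PeerTile({ trackId }: { trackId: string }) {
  return <VideoRendererView trackId={trackId} style={{ flex: 1 }} />;
}
```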
5 changes: 1 addition & 4 deletions example/App.tsx
@@ -1,4 +1,3 @@
import { FishjamContextProvider } from '@fishjam-dev/react-native-client';
import React from 'react';
import { GestureHandlerRootView } from 'react-native-gesture-handler';
import Toast from 'react-native-toast-message';
@@ -9,9 +8,7 @@ function App(): React.JSX.Element {
return (
<>
<GestureHandlerRootView>
<FishjamContextProvider>
<AppNavigator />
</FishjamContextProvider>
<AppNavigator />
</GestureHandlerRootView>
<Toast />
</>
7 changes: 3 additions & 4 deletions example/hooks/useJoinRoom.ts
@@ -1,6 +1,6 @@
import {
useCamera,
useFishjamClient,
joinRoom as fjJoinRoom,
useMicrophone,
VideoQuality,
} from '@fishjam-dev/react-native-client';
@@ -47,7 +47,6 @@ export function useJoinRoom({
isCameraAvailable,
isMicrophoneAvailable,
}: Props) {
const { join } = useFishjamClient();
const { startCamera, getCaptureDevices } = useCamera();
const { startMicrophone } = useMicrophone();

@@ -68,7 +67,7 @@
cameraEnabled: isCameraAvailable,
});

await join({
await fjJoinRoom({
name: 'RN mobile',
});

@@ -79,7 +78,7 @@
}, [
isCameraAvailable,
isMicrophoneAvailable,
join,
fjJoinRoom,
startCamera,
startMicrophone,
]);
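
Condensed from the hunks above, the hook now calls the exported joinRoom directly instead of going through useFishjamClient. Roughly as follows — the camera options, error handling, and the exact call order are elided or assumed here:

```ts
import { useCallback } from 'react';
import {
  useCamera,
  useMicrophone,
  joinRoom as fjJoinRoom,
} from '@fishjam-dev/react-native-client';

type Props = { isCameraAvailable: boolean; isMicrophoneAvailable: boolean };

export function useJoinRoom({ isCameraAvailable, isMicrophoneAvailable }: Props) {
  const { startCamera } = useCamera();
  const { startMicrophone } = useMicrophone();

  return useCallback(async () => {
    // Start local media first, then join the room via the standalone export.
    await startCamera({ cameraEnabled: isCameraAvailable });
    await fjJoinRoom({ name: 'RN mobile' });
    if (isMicrophoneAvailable) {
      await startMicrophone();
    }
  }, [isCameraAvailable, isMicrophoneAvailable, startCamera, startMicrophone]);
}
```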
