React native vb #1

Open · wants to merge 10 commits into master
2 changes: 1 addition & 1 deletion README.md
@@ -15,7 +15,7 @@ Everyone is welcome to our [Discourse community](https://react-native-webrtc.dis

## WebRTC Revision

-* Currently used revision: [M100](https://github.com/jitsi/webrtc/releases/tag/v100.0.0)

> Collaborator (Author) commented: please check latest jitsi webrtc version

+* Currently used revision: [M94](https://github.com/jitsi/webrtc/releases/tag/v94.0.0)
* Supported architectures
* Android: armeabi-v7a, arm64-v8a, x86, x86_64
* iOS: arm64, x86_64 (for bitcode support, run [this script](https://github.com/react-native-webrtc/react-native-webrtc/blob/master/tools/downloadBitcode.sh))
10 changes: 10 additions & 0 deletions android/build.gradle
@@ -4,6 +4,15 @@ def safeExtGet(prop, fallback) {
    rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
}

+allprojects {
+    repositories {
+        google()
+        // NOTE: jcenter() has been read-only since 2021; mavenCentral() below covers most of what it hosted.
+        jcenter()
+        maven { url 'https://www.jitpack.io' }
+        mavenCentral()
+    }
+}

android {
    compileSdkVersion safeExtGet('compileSdkVersion', 23)
    buildToolsVersion safeExtGet('buildToolsVersion', "23.0.1")
@@ -28,5 +37,6 @@ android {

dependencies {
    implementation 'com.facebook.react:react-native:+'
+   implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta4'
    api fileTree(dir: 'libs', include: ['*.jar'])
}
@@ -199,7 +199,7 @@ void getUserMedia(
            cameraEnumerator,
            videoConstraintsMap);

-       videoTrack = createVideoTrack(cameraCaptureController);
+       videoTrack = createVideoTrack(cameraCaptureController, videoConstraintsMap.hasKey("vb"));
    }

    if (audioTrack == null && videoTrack == null) {
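
Note that videoConstraintsMap.hasKey("vb") is true whenever the key is present at all, so a caller passing vb: false would still get the virtual background processor. A stricter reading of the constraint could look like the sketch below (not part of this PR; ReadableType comes from com.facebook.react.bridge):

    // Sketch, not the PR's code: treat vb as a boolean value rather than
    // mere key presence.
    boolean vb = videoConstraintsMap.hasKey("vb")
            && videoConstraintsMap.getType("vb") == ReadableType.Boolean
            && videoConstraintsMap.getBoolean("vb");
    videoTrack = createVideoTrack(cameraCaptureController, vb);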
@@ -355,10 +355,10 @@ private VideoTrack createScreenTrack() {
    int height = displayMetrics.heightPixels;
    ScreenCaptureController screenCaptureController
        = new ScreenCaptureController(reactContext.getCurrentActivity(), width, height, mediaProjectionPermissionResultData);
-   return createVideoTrack(screenCaptureController);
+   return createVideoTrack(screenCaptureController, false);
}

-private VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController) {
+private VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController, Boolean vb) {
    videoCaptureController.initializeVideoCapturer();

    VideoCapturer videoCapturer = videoCaptureController.videoCapturer;
@@ -379,6 +379,11 @@ private VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController) {
    VideoSource videoSource = pcFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, reactContext, videoSource.getCapturerObserver());

+   if (vb) {
+       VideoProcessor p = new VirtualBackgroundVideoProcessor(reactContext, surfaceTextureHelper);
+       videoSource.setVideoProcessor(p);
+   }

    String id = UUID.randomUUID().toString();
    VideoTrack track = pcFactory.createVideoTrack(id, videoSource);

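The processor is attached once, when the track is created. If runtime toggling were needed, libwebrtc's VideoSource takes a nullable processor, so the same hook can detach or replace it later. A minimal sketch (the enableVirtualBackground helper is hypothetical, and it assumes reactContext and surfaceTextureHelper are in scope as in createVideoTrack above):

    // Hypothetical helper, not in the PR: setVideoProcessor(null) detaches the
    // current processor; passing a new instance replaces it.
    private void enableVirtualBackground(VideoSource videoSource, boolean enable) {
        if (enable) {
            videoSource.setVideoProcessor(
                    new VirtualBackgroundVideoProcessor(reactContext, surfaceTextureHelper));
        } else {
            videoSource.setVideoProcessor(null);
        }
    }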
166 changes: 166 additions & 0 deletions VirtualBackgroundVideoProcessor.java
@@ -0,0 +1,166 @@
package com.oney.WebRTCModule;

import static android.graphics.Color.argb;
import static android.graphics.PorterDuff.Mode.DST_OVER;
import static android.graphics.PorterDuff.Mode.SRC_IN;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuffXfermode;
import android.opengl.GLES20;
import android.opengl.GLUtils;

import androidx.annotation.Nullable;

import com.facebook.react.bridge.ReactApplicationContext;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.segmentation.Segmentation;
import com.google.mlkit.vision.segmentation.SegmentationMask;
import com.google.mlkit.vision.segmentation.Segmenter;
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions;

import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TextureBufferImpl;
import org.webrtc.VideoFrame;
import org.webrtc.VideoProcessor;
import org.webrtc.VideoSink;
import org.webrtc.YuvConverter;

public class VirtualBackgroundVideoProcessor implements VideoProcessor {

    private VideoSink target;
    private final SurfaceTextureHelper surfaceTextureHelper;
    final YuvConverter yuvConverter = new YuvConverter();

    private YuvFrame yuvFrame;
    private Bitmap inputFrameBitmap;
    private int frameCounter = 0;

    // Background replacement image, decoded once and pre-scaled to cover the camera frame.
    final Bitmap backgroundImage;
    final Bitmap scaled;

    // ML Kit selfie segmenter in stream mode, which is optimized for
    // processing successive frames from a live camera feed.
    final SelfieSegmenterOptions options =
            new SelfieSegmenterOptions.Builder()
                    .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
                    .build();
    final Segmenter segmenter = Segmentation.getClient(options);

    public VirtualBackgroundVideoProcessor(ReactApplicationContext context, SurfaceTextureHelper surfaceTextureHelper) {
        super();

        this.surfaceTextureHelper = surfaceTextureHelper;

        backgroundImage = BitmapFactory.decodeResource(context.getResources(), R.drawable.portrait_background);
        scaled = Bitmap.createScaledBitmap(backgroundImage, 640, 640, false);
    }

    @Override
    public void setSink(@Nullable VideoSink videoSink) {
        target = videoSink;
    }

    @Override
    public void onCapturerStarted(boolean b) {
    }

    @Override
    public void onCapturerStopped() {
    }

    @Override
    public void onFrameCaptured(VideoFrame videoFrame) {
        // Only one out of every three captured frames is processed (see
        // updateFrameCounter); the others are dropped so ML Kit segmentation
        // can keep up with the camera.
        if (frameCounter == 0) {
            yuvFrame = new YuvFrame(videoFrame);
            inputFrameBitmap = yuvFrame.getBitmap();

            InputImage image = InputImage.fromBitmap(inputFrameBitmap, 0);

            // Segmentation is asynchronous: the composited frame reaches the sink
            // from the success listener below, not from this callback.
            Task<SegmentationMask> result = segmenter.process(image)
                .addOnSuccessListener(new OnSuccessListener<SegmentationMask>() {
                    @Override
                    public void onSuccess(SegmentationMask mask) {
                        // Turn the per-pixel confidence buffer into an alpha mask:
                        // opaque where the background is likely, transparent over the
                        // person. The magenta fill itself is discarded by the
                        // compositing below; only the alpha channel matters.
                        mask.getBuffer().rewind();
                        int[] arr = maskColorsFromByteBuffer(mask);
                        Bitmap segmentedBitmap = Bitmap.createBitmap(
                                arr, mask.getWidth(), mask.getHeight(), Bitmap.Config.ARGB_8888);
                        arr = null;

                        Bitmap segmentedBitmapMutable = segmentedBitmap.copy(Bitmap.Config.ARGB_8888, true);
                        segmentedBitmap.recycle();
                        Canvas canvas = new Canvas(segmentedBitmapMutable);

                        // SRC_IN keeps the background image only where the mask has
                        // alpha; DST_OVER then fills the remaining (person) pixels
                        // from the original camera frame.
                        Paint paint = new Paint();
                        paint.setXfermode(new PorterDuffXfermode(SRC_IN));
                        canvas.drawBitmap(scaled, 0, 0, paint);
                        paint.setXfermode(new PorterDuffXfermode(DST_OVER));
                        canvas.drawBitmap(inputFrameBitmap, 0, 0, paint);

                        // Upload the composited bitmap as a GL texture on the capture
                        // thread, convert it to I420, and forward it to the sink.
                        surfaceTextureHelper.getHandler().post(new Runnable() {
                            @Override
                            public void run() {
                                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                                TextureBufferImpl buffer = new TextureBufferImpl(
                                        segmentedBitmapMutable.getWidth(), segmentedBitmapMutable.getHeight(),
                                        VideoFrame.TextureBuffer.Type.RGB, GLES20.GL_TEXTURE0, new Matrix(),
                                        surfaceTextureHelper.getHandler(), yuvConverter, null);
                                // Note: the GL_TEXTURE0 enum value doubles as the texture
                                // name here; GLES creates the object on first bind, though
                                // glGenTextures would be the cleaner way to get a name.
                                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE0);

                                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, segmentedBitmapMutable, 0);
                                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

                                VideoFrame.I420Buffer i420Buf = yuvConverter.convert(buffer);
                                // Wrap the converted buffer in a new frame (fixed
                                // 180-degree rotation, original capture timestamp).
                                VideoFrame out = new VideoFrame(i420Buf, 180, videoFrame.getTimestampNs());

                                buffer.release();
                                //yuvFrame.dispose();

                                target.onFrame(out);
                                out.release();
                            }
                        });
                    }
                });
        }
        updateFrameCounter();
    }

    // Cycle 0, 1, 2: onFrameCaptured only processes frames when the counter
    // is 0, i.e. one out of every three captured frames.
    private void updateFrameCounter() {
        frameCounter++;
        if (frameCounter == 3) {
            frameCounter = 0;
        }
    }

    private int[] maskColorsFromByteBuffer(SegmentationMask mask) {
        int[] colors = new int[mask.getHeight() * mask.getWidth()];
        for (int i = 0; i < mask.getHeight() * mask.getWidth(); i++) {
            float backgroundLikelihood = 1 - mask.getBuffer().getFloat();
            if (backgroundLikelihood > 0.9) {
                colors[i] = argb(255, 255, 0, 255);
            } else if (backgroundLikelihood > 0.2) {
                // Linear interpolation so that the alpha is 0 when
                // backgroundLikelihood is 0.2 and 128 when it is 0.9;
                // +0.5 rounds the float value to the nearest int.
                double d = 182.9 * backgroundLikelihood - 36.6 + 0.5;
                int alpha = (int) d;
                colors[i] = argb(alpha, 255, 0, 255);
            }
        }
        return colors;
    }
}
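
For reference, the interpolation above (alpha = 182.9 * p - 36.6, plus 0.5 for rounding) maps a background likelihood of 0.2 to 0 and 0.9 to 128, matching the comment in maskColorsFromByteBuffer. A standalone sanity check (illustration only, not part of the PR):

    // Standalone check of the mask alpha interpolation constants used above.
    public class AlphaInterpolationCheck {
        static int alphaFor(float backgroundLikelihood) {
            return (int) (182.9 * backgroundLikelihood - 36.6 + 0.5);
        }

        public static void main(String[] args) {
            System.out.println(alphaFor(0.2f)); // prints 0
            System.out.println(alphaFor(0.9f)); // prints 128
        }
    }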