From 0edcb278229d4eae533cc1dc3a4647700fb6325c Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sun, 7 Jun 2020 13:17:06 +0100 Subject: [PATCH 1/6] Bump AGP to 3.6.3 --- android/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/android/build.gradle b/android/build.gradle index b345cb57cd..dbd4233cff 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -8,7 +8,7 @@ buildscript { } dependencies { - classpath 'com.android.tools.build:gradle:3.3.1' + classpath 'com.android.tools.build:gradle:3.6.3' } } From f04e600998ac1c06d61e50497f1580e2100f13d3 Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sun, 7 Jun 2020 13:21:00 +0100 Subject: [PATCH 2/6] Refactors Android plugin alongside the embedding V2 migration --- .../webrtc/DataChannelObserver.java | 148 +- .../webrtc/FlutterWebRTCPlugin.java | 1461 ++------------ .../cloudwebrtc/webrtc/GetUserMediaImpl.java | 1736 +++++++++-------- .../webrtc/MethodCallHandlerImpl.java | 1400 +++++++++++++ .../webrtc/PeerConnectionObserver.java | 832 ++++---- .../com/cloudwebrtc/webrtc/StateProvider.java | 29 + .../webrtc/utils/MediaConstraintsUtils.java | 92 + .../webrtc/utils/PermissionUtils.java | 32 +- 8 files changed, 3025 insertions(+), 2705 deletions(-) create mode 100644 android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java create mode 100644 android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java create mode 100644 android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java index ec98688052..49b25dcad6 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java @@ -1,96 +1,94 @@ package com.cloudwebrtc.webrtc; -import java.nio.charset.Charset; -import android.util.Base64; - -import 
org.webrtc.DataChannel; -import io.flutter.plugin.common.EventChannel; import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import java.nio.charset.Charset; +import org.webrtc.DataChannel; class DataChannelObserver implements DataChannel.Observer, EventChannel.StreamHandler { - private final int mId; - private final DataChannel mDataChannel; - private final String peerConnectionId; - private final FlutterWebRTCPlugin plugin; - private EventChannel eventChannel; - private EventChannel.EventSink eventSink; - DataChannelObserver(FlutterWebRTCPlugin plugin, String peerConnectionId, int id, DataChannel dataChannel) { - this.peerConnectionId = peerConnectionId; - mId = id; - mDataChannel = dataChannel; - this.plugin = plugin; - this.eventChannel = - new EventChannel( - plugin.registrar().messenger(), - "FlutterWebRTC/dataChannelEvent" + peerConnectionId + String.valueOf(id)); - eventChannel.setStreamHandler(this); - } + private final int mId; + private final DataChannel mDataChannel; - private String dataChannelStateString(DataChannel.State dataChannelState) { - switch (dataChannelState) { - case CONNECTING: - return "connecting"; - case OPEN: - return "open"; - case CLOSING: - return "closing"; - case CLOSED: - return "closed"; - } - return ""; - } + private EventChannel eventChannel; + private EventChannel.EventSink eventSink; - @Override - public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = new AnyThreadSink(sink); - } + DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, int id, + DataChannel dataChannel) { + mId = id; + mDataChannel = dataChannel; + eventChannel = + new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + id); + eventChannel.setStreamHandler(this); + } - @Override - public void onCancel(Object o) { - eventSink = null; + private String 
dataChannelStateString(DataChannel.State dataChannelState) { + switch (dataChannelState) { + case CONNECTING: + return "connecting"; + case OPEN: + return "open"; + case CLOSING: + return "closing"; + case CLOSED: + return "closed"; } + return ""; + } - @Override - public void onBufferedAmountChange(long amount) { } + @Override + public void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + } - @Override - public void onStateChange() { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "dataChannelStateChanged"); - params.putInt("id", mDataChannel.id()); - params.putString("state", dataChannelStateString(mDataChannel.state())); - sendEvent(params); - } + @Override + public void onCancel(Object o) { + eventSink = null; + } - @Override - public void onMessage(DataChannel.Buffer buffer) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "dataChannelReceiveMessage"); - params.putInt("id", mDataChannel.id()); + @Override + public void onBufferedAmountChange(long amount) { + } - byte[] bytes; - if (buffer.data.hasArray()) { - bytes = buffer.data.array(); - } else { - bytes = new byte[buffer.data.remaining()]; - buffer.data.get(bytes); - } + @Override + public void onStateChange() { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "dataChannelStateChanged"); + params.putInt("id", mDataChannel.id()); + params.putString("state", dataChannelStateString(mDataChannel.state())); + sendEvent(params); + } - if (buffer.binary) { - params.putString("type", "binary"); - params.putByte("data", bytes); - } else { - params.putString("type", "text"); - params.putString("data", new String(bytes, Charset.forName("UTF-8"))); - } + @Override + public void onMessage(DataChannel.Buffer buffer) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "dataChannelReceiveMessage"); + params.putInt("id", mDataChannel.id()); - sendEvent(params); + byte[] 
bytes; + if (buffer.data.hasArray()) { + bytes = buffer.data.array(); + } else { + bytes = new byte[buffer.data.remaining()]; + buffer.data.get(bytes); } - void sendEvent(ConstraintsMap params) { - if(eventSink != null) - eventSink.success(params.toMap()); + if (buffer.binary) { + params.putString("type", "binary"); + params.putByte("data", bytes); + } else { + params.putString("type", "text"); + params.putString("data", new String(bytes, Charset.forName("UTF-8"))); + } + + sendEvent(params); + } + + private void sendEvent(ConstraintsMap params) { + if (eventSink != null) { + eventSink.success(params.toMap()); } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java index f4ae90c52d..4b7d248b28 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java @@ -2,1373 +2,140 @@ import android.app.Activity; import android.content.Context; -import android.hardware.Camera; -import android.graphics.SurfaceTexture; -import android.media.AudioManager; import android.util.Log; -import android.util.LongSparseArray; - -import com.cloudwebrtc.webrtc.record.AudioChannel; -import com.cloudwebrtc.webrtc.record.FrameCapturer; -import com.cloudwebrtc.webrtc.utils.AnyThreadResult; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.EglUtils; -import com.cloudwebrtc.webrtc.utils.ObjectType; +import androidx.annotation.NonNull; +import com.cloudwebrtc.webrtc.MethodCallHandlerImpl.AudioManager; import com.cloudwebrtc.webrtc.utils.RTCAudioManager; - -import java.io.UnsupportedEncodingException; -import java.io.File; -import java.nio.ByteBuffer; -import java.util.*; - -import org.webrtc.AudioTrack; -import org.webrtc.DefaultVideoDecoderFactory; -import org.webrtc.DefaultVideoEncoderFactory; 
-import org.webrtc.EglBase; -import org.webrtc.IceCandidate; -import org.webrtc.Logging; -import org.webrtc.MediaConstraints; -import org.webrtc.MediaStream; -import org.webrtc.MediaStreamTrack; -import org.webrtc.PeerConnection; -import org.webrtc.PeerConnectionFactory; -import org.webrtc.SdpObserver; -import org.webrtc.SessionDescription; -import org.webrtc.VideoTrack; -import org.webrtc.audio.AudioDeviceModule; -import org.webrtc.audio.JavaAudioDeviceModule; - -import io.flutter.plugin.common.EventChannel; +import io.flutter.embedding.engine.plugins.FlutterPlugin; +import io.flutter.embedding.engine.plugins.activity.ActivityAware; +import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding; +import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.MethodChannel.MethodCallHandler; -import io.flutter.plugin.common.MethodChannel.Result; -import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.PluginRegistry.Registrar; import io.flutter.view.TextureRegistry; +import java.util.Set; /** * FlutterWebRTCPlugin */ -public class FlutterWebRTCPlugin implements MethodCallHandler { - - static public final String TAG = "FlutterWebRTCPlugin"; - - private final Registrar registrar; - private final MethodChannel channel; - - public Map localStreams; - public Map localTracks; - private final Map mPeerConnectionObservers; - - private final TextureRegistry textures; - private LongSparseArray renders = new LongSparseArray<>(); - - /** - * The implementation of {@code getUserMedia} extracted into a separate file - * in order to reduce complexity and to (somewhat) separate concerns. 
- */ - private GetUserMediaImpl getUserMediaImpl; - final PeerConnectionFactory mFactory; - - private AudioDeviceModule audioDeviceModule; - - private RTCAudioManager rtcAudioManager; - - public Activity getActivity() { - return registrar.activity(); - } - - public Context getContext() { - return registrar.context(); - } - - /** - * Plugin registration. - */ - public static void registerWith(Registrar registrar) { - final MethodChannel channel = new MethodChannel(registrar.messenger(), "FlutterWebRTC.Method"); - channel.setMethodCallHandler(new FlutterWebRTCPlugin(registrar, channel)); - } - - public Registrar registrar() { - return this.registrar; - } - - private FlutterWebRTCPlugin(Registrar registrar, MethodChannel channel) { - this.registrar = registrar; - this.channel = channel; - this.textures = registrar.textures(); - mPeerConnectionObservers = new HashMap(); - localStreams = new HashMap(); - localTracks = new HashMap(); - - PeerConnectionFactory.initialize( - PeerConnectionFactory.InitializationOptions.builder(registrar.context()) - .setEnableInternalTracer(true) - .createInitializationOptions()); - - // Initialize EGL contexts required for HW acceleration. 
- EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); - - getUserMediaImpl = new GetUserMediaImpl(this, registrar.context()); - - audioDeviceModule = JavaAudioDeviceModule.builder(registrar.context()) - .setUseHardwareAcousticEchoCanceler(true) - .setUseHardwareNoiseSuppressor(true) - .setSamplesReadyCallback(getUserMediaImpl.inputSamplesInterceptor) - .createAudioDeviceModule(); - - getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; - - mFactory = PeerConnectionFactory.builder() - .setOptions(new PeerConnectionFactory.Options()) - .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglContext, false, true)) - .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglContext)) - .setAudioDeviceModule(audioDeviceModule) - .createPeerConnectionFactory(); - } - - private void startAudioManager() { - if(rtcAudioManager != null) - return; - - rtcAudioManager = RTCAudioManager.create(registrar.context()); - // Store existing audio settings and change audio mode to - // MODE_IN_COMMUNICATION for best possible VoIP performance. - Log.d(TAG, "Starting the audio manager..."); - rtcAudioManager.start(new RTCAudioManager.AudioManagerEvents() { - // This method will be called each time the number of available audio - // devices has changed. - @Override - public void onAudioDeviceChanged( - RTCAudioManager.AudioDevice audioDevice, Set availableAudioDevices) { - onAudioManagerDevicesChanged(audioDevice, availableAudioDevices); - } - }); - } - - private void stopAudioManager() { - if (rtcAudioManager != null) { - Log.d(TAG, "Stoping the audio manager..."); - rtcAudioManager.stop(); - rtcAudioManager = null; - } - } - - // This method is called when the audio manager reports audio device change, - // e.g. from wired headset to speakerphone. 
- private void onAudioManagerDevicesChanged( - final RTCAudioManager.AudioDevice device, final Set availableDevices) { - Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", " - + "selected: " + device); - // TODO(henrika): add callback handler. - } - - @Override - public void onMethodCall(MethodCall call, Result notSafeResult) { - final AnyThreadResult result = new AnyThreadResult(notSafeResult); - if (call.method.equals("createPeerConnection")) { - Map constraints = call.argument("constraints"); - Map configuration = call.argument("configuration"); - String peerConnectionId = peerConnectionInit(new ConstraintsMap(configuration), new ConstraintsMap((constraints))); - ConstraintsMap res = new ConstraintsMap(); - res.putString("peerConnectionId", peerConnectionId); - result.success(res.toMap()); - } else if (call.method.equals("getUserMedia")) { - Map constraints = call.argument("constraints"); - ConstraintsMap constraintsMap = new ConstraintsMap(constraints); - getUserMedia(constraintsMap, result); - } else if (call.method.equals("createLocalMediaStream")) { - createLocalMediaStream(result); - }else if (call.method.equals("getSources")) { - getSources(result); - }else if (call.method.equals("createOffer")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map constraints = call.argument("constraints"); - peerConnectionCreateOffer(peerConnectionId, new ConstraintsMap(constraints), result); - } else if (call.method.equals("createAnswer")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map constraints = call.argument("constraints"); - peerConnectionCreateAnswer(peerConnectionId, new ConstraintsMap(constraints), result); - } else if (call.method.equals("mediaStreamGetTracks")) { - String streamId = call.argument("streamId"); - MediaStream stream = getStreamForId(streamId,""); - Map resultMap = new HashMap<>(); - List audioTracks = new ArrayList<>(); - List videoTracks = new ArrayList<>(); - for (AudioTrack track : 
stream.audioTracks) { - localTracks.put(track.id(), track); - Map trackMap = new HashMap<>(); - trackMap.put("enabled", track.enabled()); - trackMap.put("id", track.id()); - trackMap.put("kind", track.kind()); - trackMap.put("label", track.id()); - trackMap.put("readyState", "live"); - trackMap.put("remote", false); - audioTracks.add(trackMap); - } - for (VideoTrack track : stream.videoTracks) { - localTracks.put(track.id(), track); - Map trackMap = new HashMap<>(); - trackMap.put("enabled", track.enabled()); - trackMap.put("id", track.id()); - trackMap.put("kind", track.kind()); - trackMap.put("label", track.id()); - trackMap.put("readyState", "live"); - trackMap.put("remote", false); - videoTracks.add(trackMap); - } - resultMap.put("audioTracks", audioTracks); - resultMap.put("videoTracks", videoTracks); - result.success(resultMap); - } else if (call.method.equals("addStream")) { - String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionAddStream(streamId, peerConnectionId, result); - } else if (call.method.equals("removeStream")) { - String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionRemoveStream(streamId, peerConnectionId, result); - } else if (call.method.equals("setLocalDescription")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map description = call.argument("description"); - peerConnectionSetLocalDescription(new ConstraintsMap(description), peerConnectionId, result); - } else if (call.method.equals("setRemoteDescription")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map description = call.argument("description"); - peerConnectionSetRemoteDescription(new ConstraintsMap(description), peerConnectionId, result); - } else if (call.method.equals("addCandidate")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map candidate = call.argument("candidate"); - 
peerConnectionAddICECandidate(new ConstraintsMap(candidate), peerConnectionId, result); - } else if (call.method.equals("getStats")) { - String peerConnectionId = call.argument("peerConnectionId"); - String trackId = call.argument("trackId"); - peerConnectionGetStats(trackId, peerConnectionId, result); - } else if (call.method.equals("createDataChannel")) { - String peerConnectionId = call.argument("peerConnectionId"); - String label = call.argument("label"); - Map dataChannelDict = call.argument("dataChannelDict"); - createDataChannel(peerConnectionId, label, new ConstraintsMap(dataChannelDict), result); - } else if (call.method.equals("dataChannelSend")) { - String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); - String type = call.argument("type"); - Boolean isBinary = type.equals("binary"); - ByteBuffer byteBuffer; - if(isBinary){ - byteBuffer = ByteBuffer.wrap(call.argument("data")); - }else{ - try { - String data = call.argument("data"); - byteBuffer = ByteBuffer.wrap(data.getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - Log.d(TAG, "Could not encode text string as UTF-8."); - result.error("dataChannelSendFailed", "Could not encode text string as UTF-8.",null); - return; - } - } - dataChannelSend(peerConnectionId, dataChannelId, byteBuffer, isBinary); - result.success(null); - } else if (call.method.equals("dataChannelClose")) { - String peerConnectionId = call.argument("peerConnectionId"); - int dataChannelId = call.argument("dataChannelId"); - dataChannelClose(peerConnectionId, dataChannelId); - result.success(null); - } else if (call.method.equals("streamDispose")) { - String streamId = call.argument("streamId"); - mediaStreamRelease(streamId); - result.success(null); - }else if (call.method.equals("mediaStreamTrackSetEnable")) { - String trackId = call.argument("trackId"); - Boolean enabled = call.argument("enabled"); - MediaStreamTrack track = getTrackForId(trackId); - if(track 
!= null){ - track.setEnabled(enabled); - } - result.success(null); - }else if (call.method.equals("mediaStreamAddTrack")) { - String streamId = call.argument("streamId"); - String trackId = call.argument("trackId"); - mediaStreamAddTrack(streamId, trackId, result); - }else if (call.method.equals("mediaStreamRemoveTrack")) { - String streamId = call.argument("streamId"); - String trackId = call.argument("trackId"); - mediaStreamRemoveTrack(streamId,trackId, result); - } else if (call.method.equals("trackDispose")) { - String trackId = call.argument("trackId"); - localTracks.remove(trackId); - result.success(null); - } else if (call.method.equals("peerConnectionClose")) { - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionClose(peerConnectionId); - result.success(null); - } else if(call.method.equals("peerConnectionDispose")){ - String peerConnectionId = call.argument("peerConnectionId"); - peerConnectionDispose(peerConnectionId); - result.success(null); - }else if (call.method.equals("createVideoRenderer")) { - TextureRegistry.SurfaceTextureEntry entry = textures.createSurfaceTexture(); - SurfaceTexture surfaceTexture = entry.surfaceTexture(); - FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(surfaceTexture, entry); - renders.put(entry.id(), render); - - EventChannel eventChannel = - new EventChannel( - registrar.messenger(), - "FlutterWebRTC/Texture" + entry.id()); - - eventChannel.setStreamHandler(render); - render.setEventChannel(eventChannel); - render.setId((int)entry.id()); - - ConstraintsMap params = new ConstraintsMap(); - params.putInt("textureId", (int)entry.id()); - result.success(params.toMap()); - } else if (call.method.equals("videoRendererDispose")) { - int textureId = call.argument("textureId"); - FlutterRTCVideoRenderer render = renders.get(textureId); - if(render == null ){ - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", null); - return; - } - render.Dispose(); - 
renders.delete(textureId); - result.success(null); - } else if (call.method.equals("videoRendererSetSrcObject")) { - int textureId = call.argument("textureId"); - String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("ownerTag"); - FlutterRTCVideoRenderer render = renders.get(textureId); - - if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", null); - return; - } - - MediaStream stream = getStreamForId(streamId, peerConnectionId); - render.setStream(stream); - result.success(null); - } else if (call.method.equals("mediaStreamTrackHasTorch")) { - String trackId = call.argument("trackId"); - getUserMediaImpl.hasTorch(trackId, result); - } else if (call.method.equals("mediaStreamTrackSetTorch")) { - String trackId = call.argument("trackId"); - boolean torch = call.argument("torch"); - getUserMediaImpl.setTorch(trackId, torch, result); - } else if (call.method.equals("mediaStreamTrackSwitchCamera")) { - String trackId = call.argument("trackId"); - getUserMediaImpl.switchCamera(trackId, result); - } else if (call.method.equals("setVolume")) { - String trackId = call.argument("trackId"); - double volume = call.argument("volume"); - mediaStreamTrackSetVolume(trackId, volume); - result.success(null); - } else if (call.method.equals("setMicrophoneMute")) { - boolean mute = call.argument("mute"); - rtcAudioManager.setMicrophoneMute(mute); - result.success(null); - } else if (call.method.equals("enableSpeakerphone")) { - boolean enable = call.argument("enable"); - if(rtcAudioManager == null ){ - startAudioManager(); - } - rtcAudioManager.setSpeakerphoneOn(enable); - result.success(null); - } else if(call.method.equals("getDisplayMedia")) { - Map constraints = call.argument("constraints"); - ConstraintsMap constraintsMap = new ConstraintsMap(constraints); - getDisplayMedia(constraintsMap, result); - }else if (call.method.equals("startRecordToFile")) { - //This method can a lot of 
different exceptions - //so we should notify plugin user about them - try { - String path = call.argument("path"); - VideoTrack videoTrack = null; - String videoTrackId = call.argument("videoTrackId"); - if (videoTrackId != null) { - MediaStreamTrack track = getTrackForId(videoTrackId); - if (track instanceof VideoTrack) - videoTrack = (VideoTrack) track; - } - AudioChannel audioChannel = null; - if (call.hasArgument("audioChannel")) - audioChannel = AudioChannel.values()[(Integer) call.argument("audioChannel")]; - Integer recorderId = call.argument("recorderId"); - if (videoTrack != null || audioChannel != null) { - getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); - result.success(null); - } else { - result.error("0", "No tracks", null); - } - } catch (Exception e) { - result.error("-1", e.getMessage(), e); - } - } else if (call.method.equals("stopRecordToFile")) { - Integer recorderId = call.argument("recorderId"); - getUserMediaImpl.stopRecording(recorderId); - result.success(null); - } else if (call.method.equals("captureFrame")) { - String path = call.argument("path"); - String videoTrackId = call.argument("trackId"); - if (videoTrackId != null) { - MediaStreamTrack track = getTrackForId(videoTrackId); - if (track instanceof VideoTrack) - new FrameCapturer((VideoTrack) track, new File(path), result); - else - result.error(null, "It's not video track", null); +public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware { + + static public final String TAG = "FlutterWebRTCPlugin"; + + private RTCAudioManager rtcAudioManager; + private MethodChannel channel; + private MethodCallHandlerImpl methodCallHandler; + + public FlutterWebRTCPlugin() { + } + + /** + * Plugin registration. 
+ */ + public static void registerWith(Registrar registrar) { + final FlutterWebRTCPlugin plugin = new FlutterWebRTCPlugin(); + + plugin.startListening(registrar.context(), registrar.messenger(), registrar.textures()); + + if (registrar.activeContext() instanceof Activity) { + plugin.methodCallHandler.setActivity((Activity) registrar.activeContext()); + } + + registrar.addViewDestroyListener(view -> { + plugin.stopListening(); + return false; + }); + } + + @Override + public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { + startListening(binding.getApplicationContext(), binding.getBinaryMessenger(), + binding.getTextureRegistry()); + } + + @Override + public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { + stopListening(); + } + + @Override + public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) { + methodCallHandler.setActivity(binding.getActivity()); + } + + @Override + public void onDetachedFromActivityForConfigChanges() { + methodCallHandler.setActivity(null); + } + + @Override + public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) { + methodCallHandler.setActivity(binding.getActivity()); + } + + @Override + public void onDetachedFromActivity() { + methodCallHandler.setActivity(null); + } + + private void startListening(final Context context, BinaryMessenger messenger, + TextureRegistry textureRegistry) { + methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry, + new AudioManager() { + @Override + public void onAudioManagerRequested(boolean requested) { + if (requested) { + if (rtcAudioManager == null) { + rtcAudioManager = RTCAudioManager.create(context); + } + rtcAudioManager.start(FlutterWebRTCPlugin.this::onAudioManagerDevicesChanged); } else { - result.error(null, "Track is null", null); + if (rtcAudioManager != null) { + rtcAudioManager.stop(); + rtcAudioManager = null; + } } - } else if (call.method.equals("getLocalDescription")) { - 
String peerConnectionId = call.argument("peerConnectionId"); - PeerConnection peerConnection = getPeerConnection(peerConnectionId); - if (peerConnection != null) { - SessionDescription sdp = peerConnection.getLocalDescription(); - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); - } else { - Log.d(TAG, "getLocalDescription() peerConnection is null"); - result.error("getLocalDescriptionFailed", "getLocalDescription() peerConnection is null", null); - } - } else if (call.method.equals("getRemoteDescription")) { - String peerConnectionId = call.argument("peerConnectionId"); - PeerConnection peerConnection = getPeerConnection(peerConnectionId); - if (peerConnection != null) { - SessionDescription sdp = peerConnection.getRemoteDescription(); - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); - } else { - Log.d(TAG, "getRemoteDescription() peerConnection is null"); - result.error("getRemoteDescriptionFailed", "getRemoteDescription() peerConnection is null", null); - } - } else if (call.method.equals("setConfiguration")) { - String peerConnectionId = call.argument("peerConnectionId"); - Map configuration = call.argument("configuration"); - PeerConnection peerConnection = getPeerConnection(peerConnectionId); - if (peerConnection != null) { - peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); - result.success(null); - } else { - Log.d(TAG, "setConfiguration() peerConnection is null"); - result.error("setConfigurationFailed", "setConfiguration() peerConnection is null", null); - } - } else { - result.notImplemented(); - } - } - - private PeerConnection getPeerConnection(String id) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - return (pco == null) ? 
null : pco.getPeerConnection(); - } - - private List createIceServers(ConstraintsArray iceServersArray) { - final int size = (iceServersArray == null) ? 0 : iceServersArray.size(); - List iceServers = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ConstraintsMap iceServerMap = iceServersArray.getMap(i); - boolean hasUsernameAndCredential = iceServerMap.hasKey("username") && iceServerMap.hasKey("credential"); - if (iceServerMap.hasKey("url")) { - if (hasUsernameAndCredential) { - iceServers.add(PeerConnection.IceServer.builder(iceServerMap.getString("url")).setUsername(iceServerMap.getString("username")).setPassword(iceServerMap.getString("credential")).createIceServer()); - } else { - iceServers.add(PeerConnection.IceServer.builder(iceServerMap.getString("url")).createIceServer()); - } - } else if (iceServerMap.hasKey("urls")) { - switch (iceServerMap.getType("urls")) { - case String: - if (hasUsernameAndCredential) { - iceServers.add(PeerConnection.IceServer.builder(iceServerMap.getString("urls")).setUsername(iceServerMap.getString("username")).setPassword(iceServerMap.getString("credential")).createIceServer()); - } else { - iceServers.add(PeerConnection.IceServer.builder(iceServerMap.getString("urls")).createIceServer()); - } - break; - case Array: - ConstraintsArray urls = iceServerMap.getArray("urls"); - List urlsList = new ArrayList<>(); - - for (int j = 0; j < urls.size(); j++) { - urlsList.add(urls.getString(j)); - } - - PeerConnection.IceServer.Builder builder = PeerConnection.IceServer.builder(urlsList); - - if (hasUsernameAndCredential) { - builder - .setUsername(iceServerMap.getString("username")) - .setPassword(iceServerMap.getString("credential")); - } - - iceServers.add(builder.createIceServer()); - - break; - } - } - } - return iceServers; - } - - private PeerConnection.RTCConfiguration parseRTCConfiguration(ConstraintsMap map) { - ConstraintsArray iceServersArray = null; - if (map != null) { - iceServersArray = 
map.getArray("iceServers"); - } - List iceServers = createIceServers(iceServersArray); - PeerConnection.RTCConfiguration conf = new PeerConnection.RTCConfiguration(iceServers); - if (map == null) { - return conf; - } - - // iceTransportPolicy (public api) - if (map.hasKey("iceTransportPolicy") - && map.getType("iceTransportPolicy") == ObjectType.String) { - final String v = map.getString("iceTransportPolicy"); - if (v != null) { - switch (v) { - case "all": // public - conf.iceTransportsType = PeerConnection.IceTransportsType.ALL; - break; - case "relay": // public - conf.iceTransportsType = PeerConnection.IceTransportsType.RELAY; - break; - case "nohost": - conf.iceTransportsType = PeerConnection.IceTransportsType.NOHOST; - break; - case "none": - conf.iceTransportsType = PeerConnection.IceTransportsType.NONE; - break; - } - } - } - - // bundlePolicy (public api) - if (map.hasKey("bundlePolicy") - && map.getType("bundlePolicy") == ObjectType.String) { - final String v = map.getString("bundlePolicy"); - if (v != null) { - switch (v) { - case "balanced": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.BALANCED; - break; - case "max-compat": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.MAXCOMPAT; - break; - case "max-bundle": // public - conf.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; - break; - } - } - } - - // rtcpMuxPolicy (public api) - if (map.hasKey("rtcpMuxPolicy") - && map.getType("rtcpMuxPolicy") == ObjectType.String) { - final String v = map.getString("rtcpMuxPolicy"); - if (v != null) { - switch (v) { - case "negotiate": // public - conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE; - break; - case "require": // public - conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; - break; - } - } - } - - // FIXME: peerIdentity of type DOMString (public api) - // FIXME: certificates of type sequence (public api) - - // iceCandidatePoolSize of type unsigned short, defaulting to 0 - if 
(map.hasKey("iceCandidatePoolSize") - && map.getType("iceCandidatePoolSize") == ObjectType.Number) { - final int v = map.getInt("iceCandidatePoolSize"); - if (v > 0) { - conf.iceCandidatePoolSize = v; - } - } - - // sdpSemantics - if (map.hasKey("sdpSemantics") - && map.getType("sdpSemantics") == ObjectType.String) { - final String v = map.getString("sdpSemantics"); - if (v != null) { - switch (v) { - case "plan-b": - conf.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B; - break; - case "unified-plan": - conf.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; - break; - } - } - } + } - // === below is private api in webrtc === - - // tcpCandidatePolicy (private api) - if (map.hasKey("tcpCandidatePolicy") - && map.getType("tcpCandidatePolicy") == ObjectType.String) { - final String v = map.getString("tcpCandidatePolicy"); - if (v != null) { - switch (v) { - case "enabled": - conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED; - break; - case "disabled": - conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; - break; - } - } - } - - // candidateNetworkPolicy (private api) - if (map.hasKey("candidateNetworkPolicy") - && map.getType("candidateNetworkPolicy") == ObjectType.String) { - final String v = map.getString("candidateNetworkPolicy"); - if (v != null) { - switch (v) { - case "all": - conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.ALL; - break; - case "low_cost": - conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.LOW_COST; - break; - } - } - } - - // KeyType (private api) - if (map.hasKey("keyType") - && map.getType("keyType") == ObjectType.String) { - final String v = map.getString("keyType"); - if (v != null) { - switch (v) { - case "RSA": - conf.keyType = PeerConnection.KeyType.RSA; - break; - case "ECDSA": - conf.keyType = PeerConnection.KeyType.ECDSA; - break; - } - } - } - - // continualGatheringPolicy (private api) - if (map.hasKey("continualGatheringPolicy") - && 
map.getType("continualGatheringPolicy") == ObjectType.String) { - final String v = map.getString("continualGatheringPolicy"); - if (v != null) { - switch (v) { - case "gather_once": - conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_ONCE; - break; - case "gather_continually": - conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; - break; - } + @Override + public void setMicrophoneMute(boolean mute) { + if (rtcAudioManager != null) { + rtcAudioManager.setMicrophoneMute(mute); } - } + } - // audioJitterBufferMaxPackets (private api) - if (map.hasKey("audioJitterBufferMaxPackets") - && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) { - final int v = map.getInt("audioJitterBufferMaxPackets"); - if (v > 0) { - conf.audioJitterBufferMaxPackets = v; + @Override + public void setSpeakerphoneOn(boolean on) { + if (rtcAudioManager != null) { + rtcAudioManager.setSpeakerphoneOn(on); } - } - - // iceConnectionReceivingTimeout (private api) - if (map.hasKey("iceConnectionReceivingTimeout") - && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) { - final int v = map.getInt("iceConnectionReceivingTimeout"); - conf.iceConnectionReceivingTimeout = v; - } - - // iceBackupCandidatePairPingInterval (private api) - if (map.hasKey("iceBackupCandidatePairPingInterval") - && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) { - final int v = map.getInt("iceBackupCandidatePairPingInterval"); - conf.iceBackupCandidatePairPingInterval = v; - } - - // audioJitterBufferFastAccelerate (private api) - if (map.hasKey("audioJitterBufferFastAccelerate") - && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) { - final boolean v = map.getBoolean("audioJitterBufferFastAccelerate"); - conf.audioJitterBufferFastAccelerate = v; - } - - // pruneTurnPorts (private api) - if (map.hasKey("pruneTurnPorts") - && map.getType("pruneTurnPorts") == ObjectType.Boolean) { 
- final boolean v = map.getBoolean("pruneTurnPorts"); - conf.pruneTurnPorts = v; - } - - // presumeWritableWhenFullyRelayed (private api) - if (map.hasKey("presumeWritableWhenFullyRelayed") - && map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) { - final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed"); - conf.presumeWritableWhenFullyRelayed = v; - } - - return conf; - } - - public String peerConnectionInit( - ConstraintsMap configuration, - ConstraintsMap constraints) { - - String peerConnectionId = getNextStreamUUID(); - PeerConnectionObserver observer = new PeerConnectionObserver(this, peerConnectionId); - PeerConnection peerConnection - = mFactory.createPeerConnection( - parseRTCConfiguration(configuration), - parseMediaConstraints(constraints), - observer); - observer.setPeerConnection(peerConnection); - if(mPeerConnectionObservers.size() == 0) { - startAudioManager(); - } - mPeerConnectionObservers.put(peerConnectionId, observer); - return peerConnectionId; - } - - String getNextStreamUUID() { - String uuid; - - do { - uuid = UUID.randomUUID().toString(); - } while (getStreamForId(uuid,"") != null); - - return uuid; - } - - String getNextTrackUUID() { - String uuid; - - do { - uuid = UUID.randomUUID().toString(); - } while (getTrackForId(uuid) != null); - - return uuid; - } - - MediaStream getStreamForId(String id, String peerConnectionId) { - MediaStream stream = localStreams.get(id); - - if (stream == null) { - if (peerConnectionId.length() > 0) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); - stream = pco.remoteStreams.get(id); - } else { - for (Map.Entry entry : mPeerConnectionObservers.entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - stream = pco.remoteStreams.get(id); - if (stream != null) { - break; - } - } - } - } - - return stream; - } - - private MediaStreamTrack getTrackForId(String trackId) { - MediaStreamTrack track = localTracks.get(trackId); - - if (track == 
null) { - for (Map.Entry entry : mPeerConnectionObservers.entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - track = pco.remoteTracks.get(trackId); - if (track != null) { - break; - } - } - } - - return track; - } - - /** - * Parses a constraint set specified in the form of a JavaScript object into - * a specific List of MediaConstraints.KeyValuePairs. - * - * @param src The constraint set in the form of a JavaScript object to - * parse. - * @param dst The List of MediaConstraints.KeyValuePairs - * into which the specified src is to be parsed. - */ - private void parseConstraints( - ConstraintsMap src, - List dst) { - - for (Map.Entry entry : src.toMap().entrySet()) { - String key = entry.getKey(); - String value = getMapStrValue(src, entry.getKey()); - dst.add(new MediaConstraints.KeyValuePair(key, value)); - } - } - - private String getMapStrValue(ConstraintsMap map, String key) { - if(!map.hasKey(key)){ - return null; - } - ObjectType type = map.getType(key); - switch (type) { - case Boolean: - return String.valueOf(map.getBoolean(key)); - case Number: - // Don't know how to distinguish between Int and Double from - // ReadableType.Number. 'getInt' will fail on double value, - // while 'getDouble' works for both. - // return String.valueOf(map.getInt(key)); - return String.valueOf(map.getDouble(key)); - case String: - return map.getString(key); - default: - return null; - } - } - - /** - * Parses mandatory and optional "GUM" constraints described by a specific - * ConstraintsMap. - * - * @param constraints A ConstraintsMap which represents a JavaScript - * object specifying the constraints to be parsed into a - * MediaConstraints instance. - * @return A new MediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. 
- */ - MediaConstraints parseMediaConstraints(ConstraintsMap constraints) { - MediaConstraints mediaConstraints = new MediaConstraints(); - - if (constraints.hasKey("mandatory") - && constraints.getType("mandatory") == ObjectType.Map) { - parseConstraints(constraints.getMap("mandatory"), - mediaConstraints.mandatory); - } else { - Log.d(TAG, "mandatory constraints are not a map"); - } - - if (constraints.hasKey("optional") - && constraints.getType("optional") == ObjectType.Array) { - ConstraintsArray optional = constraints.getArray("optional"); - - for (int i = 0, size = optional.size(); i < size; i++) { - if (optional.getType(i) == ObjectType.Map) { - parseConstraints( - optional.getMap(i), - mediaConstraints.optional); - } - } - } else { - Log.d(TAG, "optional constraints are not an array"); - } - - return mediaConstraints; - } - - public void getUserMedia(ConstraintsMap constraints, Result result) { - String streamId = getNextStreamUUID(); - MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); - - if (mediaStream == null) { - // XXX The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "getUserMediaFailed", - "Failed to create new media stream", null); - return; - } - - getUserMediaImpl.getUserMedia(constraints, result, mediaStream); - } - - public void getDisplayMedia(ConstraintsMap constraints, Result result) { - String streamId = getNextStreamUUID(); - MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); - - if (mediaStream == null) { - // XXX The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. 
- result.error( - /* type */ "getDisplayMedia", - "Failed to create new media stream", null); - return; - } - - getUserMediaImpl.getDisplayMedia(constraints, result, mediaStream); - } - - public void getSources(Result result) { - ConstraintsArray array = new ConstraintsArray(); - String[] names = new String[Camera.getNumberOfCameras()]; - - for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { - ConstraintsMap info = getCameraInfo(i); - if (info != null) { - array.pushMap(info); - } - } - - ConstraintsMap audio = new ConstraintsMap(); - audio.putString("label", "Audio"); - audio.putString("deviceId", "audio-1"); - audio.putString("facing", ""); - audio.putString("kind", "audioinput"); - array.pushMap(audio); - result.success(array); - } - - private void createLocalMediaStream(Result result) { - String streamId = getNextStreamUUID(); - MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); - localStreams.put(streamId, mediaStream); - - if (mediaStream == null) { - result.error(/* type */ "createLocalMediaStream", "Failed to create new media stream", null); - return; - } - Map resultMap = new HashMap<>(); - resultMap.put("streamId", mediaStream.getId()); - result.success(resultMap); - } - - public void mediaStreamTrackStop(final String id) { - // Is this functionality equivalent to `mediaStreamTrackRelease()` ? - // if so, we should merge this two and remove track from stream as well. - MediaStreamTrack track = localTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackStop() track is null"); - return; - } - track.setEnabled(false); - if (track.kind().equals("video")) { - getUserMediaImpl.removeVideoCapturer(id); - } - localTracks.remove(id); - // What exactly does `detached` mean in doc? 
- // see: https://www.w3.org/TR/mediacapture-streams/#track-detached - } - - public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { - MediaStreamTrack track = localTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); - return; - } else if (track.enabled() == enabled) { - return; - } - track.setEnabled(enabled); - } - - public void mediaStreamTrackSetVolume(final String id, final double volume) { - MediaStreamTrack track = localTracks.get(id); - if (track != null && track instanceof AudioTrack) { - Log.d(TAG, "setVolume(): " + id + "," + volume); - try { - ((AudioTrack)track).setVolume(volume); - } catch (Exception e) { - Log.e(TAG, "setVolume(): error", e); - } - } else { - Log.w(TAG, "setVolume(): track not found: " + id); - } - } - - public void mediaStreamAddTrack(final String streaemId, final String trackId, Result result) { - MediaStream mediaStream = localStreams.get(streaemId); - if (mediaStream != null) { - MediaStreamTrack track = localTracks.get(trackId); - if (track != null){ - if (track.kind().equals("audio")) { - mediaStream.addTrack((AudioTrack) track); - } else if (track.kind().equals("video")) { - mediaStream.addTrack((VideoTrack) track); - } - } else { - String errorMsg = "mediaStreamAddTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); - } - } else { - String errorMsg = "mediaStreamAddTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); - } - result.success(null); - } - - public void mediaStreamRemoveTrack(final String streaemId, final String trackId, Result result) { - MediaStream mediaStream = localStreams.get(streaemId); - if (mediaStream != null) { - MediaStreamTrack track = localTracks.get(trackId); - if (track != null) { - if (track.kind().equals("audio")) { - mediaStream.removeTrack((AudioTrack) track); - } else if 
(track.kind().equals("video")) { - mediaStream.removeTrack((VideoTrack) track); - } - } else { - String errorMsg = "mediaStreamRemoveTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); - } - } else { - String errorMsg = "mediaStreamRemoveTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); - } - result.success(null); - } - - public void mediaStreamTrackRelease(final String streamId, final String _trackId) { - MediaStream stream = localStreams.get(streamId); - if (stream == null) { - Log.d(TAG, "mediaStreamTrackRelease() stream is null"); - return; - } - MediaStreamTrack track = localTracks.get(_trackId); - if (track == null) { - Log.d(TAG, "mediaStreamTrackRelease() track is null"); - return; - } - track.setEnabled(false); // should we do this? - localTracks.remove(_trackId); - if (track.kind().equals("audio")) { - stream.removeTrack((AudioTrack) track); - } else if (track.kind().equals("video")) { - stream.removeTrack((VideoTrack) track); - getUserMediaImpl.removeVideoCapturer(_trackId); - } - } - - public ConstraintsMap getCameraInfo(int index) { - Camera.CameraInfo info = new Camera.CameraInfo(); - - try { - Camera.getCameraInfo(index, info); - } catch (Exception e) { - Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e); - return null; - } - ConstraintsMap params = new ConstraintsMap(); - String facing = info.facing == 1 ? 
"front" : "back"; - params.putString("label", "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); - params.putString("deviceId", "" + index); - params.putString("facing", facing); - params.putString("kind", "videoinput"); - return params; - } - - private MediaConstraints defaultConstraints() { - MediaConstraints constraints = new MediaConstraints(); - // TODO video media - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")); - constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true")); - return constraints; - } - - public void peerConnectionSetConfiguration(ConstraintsMap configuration, PeerConnection peerConnection) { - if (peerConnection == null) { - Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null"); - return; - } - peerConnection.setConfiguration(parseRTCConfiguration(configuration)); - } - - public void peerConnectionAddStream(final String streamId, final String id, Result result) { - MediaStream mediaStream = localStreams.get(streamId); - if (mediaStream == null) { - Log.d(TAG, "peerConnectionAddStream() mediaStream is null"); - return; - } - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection != null) { - boolean res = peerConnection.addStream(mediaStream); - Log.d(TAG, "addStream" + result); - result.success(res); - } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); - result.error("peerConnectionAddStreamFailed", "peerConnectionAddStream() peerConnection is null", null); - } - } - - public void peerConnectionRemoveStream(final String streamId, final String id, Result result) { - MediaStream mediaStream = localStreams.get(streamId); - if (mediaStream == null) { - Log.d(TAG, "peerConnectionRemoveStream() mediaStream is null"); - return; - } - PeerConnection peerConnection = getPeerConnection(id); - 
if (peerConnection != null) { - peerConnection.removeStream(mediaStream); - result.success(null); - } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); - result.error("peerConnectionRemoveStreamFailed", "peerConnectionAddStream() peerConnection is null", null); - } - } - - public void peerConnectionCreateOffer( - String id, - ConstraintsMap constraints, - final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - if (peerConnection != null) { - peerConnection.createOffer(new SdpObserver() { - @Override - public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); - } - - @Override - public void onCreateSuccess(final SessionDescription sdp) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); - } - - @Override - public void onSetFailure(String s) { - } - - @Override - public void onSetSuccess() { - } - }, parseMediaConstraints(constraints)); - } else { - Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); - result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); - } - } - - public void peerConnectionCreateAnswer( - String id, - ConstraintsMap constraints, - final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - if (peerConnection != null) { - peerConnection.createAnswer(new SdpObserver() { - @Override - public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); - } - - @Override - public void onCreateSuccess(final SessionDescription sdp) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("sdp", sdp.description); - params.putString("type", sdp.type.canonicalForm()); - result.success(params.toMap()); - } - - @Override - public void onSetFailure(String s) { - } - - @Override - public void onSetSuccess() { - } - }, 
parseMediaConstraints(constraints)); - } else { - Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); - result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); - } - } - - public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - - Log.d(TAG, "peerConnectionSetLocalDescription() start"); - if (peerConnection != null) { - SessionDescription sdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") - ); - - peerConnection.setLocalDescription(new SdpObserver() { - @Override - public void onCreateSuccess(final SessionDescription sdp) { - } - - @Override - public void onSetSuccess() { - result.success(null); - } - - @Override - public void onCreateFailure(String s) { - } - - @Override - public void onSetFailure(String s) { - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); - } - }, sdp); - } else { - Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); - } - Log.d(TAG, "peerConnectionSetLocalDescription() end"); - } - - public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, final Result result) { - PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); - - Log.d(TAG, "peerConnectionSetRemoteDescription() start"); - if (peerConnection != null) { - SessionDescription sdp = new SessionDescription( - SessionDescription.Type.fromCanonicalForm(sdpMap.getString("type")), - sdpMap.getString("sdp") - ); - - peerConnection.setRemoteDescription(new SdpObserver() { - @Override - public void onCreateSuccess(final SessionDescription sdp) { - } - - @Override - public void onSetSuccess() { - result.success(null); - } - - @Override - public void onCreateFailure(String s) { - 
} - - @Override - public void onSetFailure(String s) { - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); - } - }, sdp); - } else { - Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); - } - Log.d(TAG, "peerConnectionSetRemoteDescription() end"); - } + } + }); - public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, final Result result) { - boolean res = false; - PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "peerConnectionAddICECandidate() start"); - if (peerConnection != null) { - IceCandidate candidate = new IceCandidate( - candidateMap.getString("sdpMid"), - candidateMap.getInt("sdpMLineIndex"), - candidateMap.getString("candidate") - ); - res = peerConnection.addIceCandidate(candidate); - } else { - Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); - result.error("peerConnectionAddICECandidateFailed", "peerConnectionAddICECandidate() peerConnection is null", null); - } - result.success(res); - Log.d(TAG, "peerConnectionAddICECandidate() end"); - } + channel = new MethodChannel(messenger, "FlutterWebRTC.Method"); + channel.setMethodCallHandler(methodCallHandler); + } - public void peerConnectionGetStats(String trackId, String id, final Result result) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); - } else { - pco.getStats(trackId, result); - } - } + private void stopListening() { + channel.setMethodCallHandler(null); + methodCallHandler = null; - public void peerConnectionClose(final String id) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionClose() peerConnection is null"); - } else { - pco.close(); - } - } - public void 
peerConnectionDispose(final String id) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionDispose() peerConnection is null"); - } else { - pco.dispose(); - mPeerConnectionObservers.remove(id); - } - if(mPeerConnectionObservers.size() == 0) { - stopAudioManager(); - } + if (rtcAudioManager != null) { + Log.d(TAG, "Stopping the audio manager..."); + rtcAudioManager.stop(); + rtcAudioManager = null; } + } - public void mediaStreamRelease(final String id) { - MediaStream mediaStream = localStreams.get(id); - if (mediaStream != null) { - for (VideoTrack track : mediaStream.videoTracks) { - localTracks.remove(track.id()); - getUserMediaImpl.removeVideoCapturer(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - localTracks.remove(track.id()); - } - localStreams.remove(id); - } else { - Log.d(TAG, "mediaStreamRelease() mediaStream is null"); - } - } - - public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config, Result result) { - // Forward to PeerConnectionObserver which deals with DataChannels - // because DataChannel is owned by PeerConnection. - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "createDataChannel() peerConnection is null"); - } else { - pco.createDataChannel(label, config, result); - } - } + // This method is called when the audio manager reports audio device change, + // e.g. from wired headset to speakerphone. + private void onAudioManagerDevicesChanged( + final RTCAudioManager.AudioDevice device, + final Set availableDevices) { + Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", " + + "selected: " + device); + // TODO(henrika): add callback handler. 
+ } - public void dataChannelSend(String peerConnectionId, int dataChannelId, ByteBuffer bytebuffer, Boolean isBinary) { - // Forward to PeerConnectionObserver which deals with DataChannels - // because DataChannel is owned by PeerConnection. - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "dataChannelSend() peerConnection is null"); - } else { - pco.dataChannelSend(dataChannelId, bytebuffer, isBinary); - } - } - - public void dataChannelClose(String peerConnectionId, int dataChannelId) { - // Forward to PeerConnectionObserver which deals with DataChannels - // because DataChannel is owned by PeerConnection. - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "dataChannelClose() peerConnection is null"); - } else { - pco.dataChannelClose(dataChannelId); - } - } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index fb38ae4f84..ce39424275 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -1,35 +1,38 @@ package com.cloudwebrtc.webrtc; import android.Manifest; +import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; import android.content.ContentValues; import android.content.Context; +import android.content.Intent; import android.content.pm.PackageManager; import android.hardware.Camera; +import android.hardware.Camera.Parameters; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import 
android.hardware.camera2.CaptureRequest; +import android.media.projection.MediaProjection; +import android.media.projection.MediaProjectionManager; import android.os.Build; +import android.os.Build.VERSION; +import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.ResultReceiver; import android.provider.MediaStore; -import androidx.annotation.Nullable; import android.util.Log; -import android.content.Intent; -import android.app.Activity; -import android.view.Surface; -import android.view.WindowManager; -import android.media.projection.MediaProjection; -import android.media.projection.MediaProjectionManager; import android.util.Range; import android.util.SparseArray; - +import android.view.Surface; +import android.view.WindowManager; +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; import com.cloudwebrtc.webrtc.record.AudioChannel; import com.cloudwebrtc.webrtc.record.AudioSamplesInterceptor; import com.cloudwebrtc.webrtc.record.MediaRecorderImpl; @@ -38,931 +41,980 @@ import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; import com.cloudwebrtc.webrtc.utils.EglUtils; +import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils; import com.cloudwebrtc.webrtc.utils.ObjectType; import com.cloudwebrtc.webrtc.utils.PermissionUtils; - +import io.flutter.plugin.common.MethodChannel.Result; import java.io.File; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; - -import org.webrtc.*; -import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.AudioSource; +import org.webrtc.AudioTrack; +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera2Capturer; +import org.webrtc.Camera2Enumerator; import org.webrtc.CameraEnumerationAndroid.CaptureFormat; - -import 
io.flutter.plugin.common.MethodChannel.Result; +import org.webrtc.CameraEnumerator; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.ScreenCapturerAndroid; +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoSource; +import org.webrtc.VideoTrack; +import org.webrtc.audio.JavaAudioDeviceModule; /** - * The implementation of {@code getUserMedia} extracted into a separate file in - * order to reduce complexity and to (somewhat) separate concerns. + * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce + * complexity and to (somewhat) separate concerns. */ -class GetUserMediaImpl{ - private static final int DEFAULT_WIDTH = 1280; - private static final int DEFAULT_HEIGHT = 720; - private static final int DEFAULT_FPS = 30; - - private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; - private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; - private static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; - private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; - private static final String GRANT_RESULTS = "GRANT_RESULT"; - private static final String PERMISSIONS = "PERMISSION"; - private static final String PROJECTION_DATA = "PROJECTION_DATA"; - private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; - private static final String REQUEST_CODE = "REQUEST_CODE"; - - static final String TAG = FlutterWebRTCPlugin.TAG; +class GetUserMediaImpl { + + private static final int DEFAULT_WIDTH = 1280; + private static final int DEFAULT_HEIGHT = 720; + private static final int DEFAULT_FPS = 30; + + private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; + private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; + private 
static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; + private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; + private static final String GRANT_RESULTS = "GRANT_RESULT"; + private static final String PERMISSIONS = "PERMISSION"; + private static final String PROJECTION_DATA = "PROJECTION_DATA"; + private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; + private static final String REQUEST_CODE = "REQUEST_CODE"; + + static final String TAG = FlutterWebRTCPlugin.TAG; + + private final Map mVideoCapturers = new HashMap<>(); + + private final StateProvider stateProvider; + private final Context applicationContext; + + static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; + private MediaProjectionManager mProjectionManager = null; + private static MediaProjection sMediaProjection = null; + + final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); + private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; + JavaAudioDeviceModule audioDeviceModule; + private final SparseArray mediaRecorders = new SparseArray<>(); + + public void screenRequestPremissions(ResultReceiver resultReceiver) { + final Activity activity = stateProvider.getActivity(); + if (activity == null) { + // Activity went away, nothing we can do. 
+ return; + } - private final Map mVideoCapturers - = new HashMap(); + Bundle args = new Bundle(); + args.putParcelable(RESULT_RECEIVER, resultReceiver); + args.putInt(REQUEST_CODE, CAPTURE_PERMISSION_REQUEST_CODE); - private final Context applicationContext; - private final FlutterWebRTCPlugin plugin; + ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); + fragment.setArguments(args); - static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; - private MediaProjectionManager mProjectionManager = null; - private static MediaProjection sMediaProjection = null; + FragmentTransaction transaction + = activity.getFragmentManager().beginTransaction().add( + fragment, + fragment.getClass().getName()); - final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); - private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; - JavaAudioDeviceModule audioDeviceModule; - private final SparseArray mediaRecorders = new SparseArray<>(); + try { + transaction.commit(); + } catch (IllegalStateException ise) { - public void screenRequestPremissions(ResultReceiver resultReceiver){ - Activity activity = plugin.getActivity(); + } + } + + public static class ScreenRequestPermissionsFragment extends Fragment { + + private ResultReceiver resultReceiver = null; + private int requestCode = 0; + private int resultCode = 0; + + private void checkSelfPermissions(boolean requestPermissions) { + if (resultCode != Activity.RESULT_OK) { + Activity activity = this.getActivity(); + Bundle args = getArguments(); + resultReceiver = args.getParcelable(RESULT_RECEIVER); + requestCode = args.getInt(REQUEST_CODE); + requestStart(activity, requestCode); + } + } - Bundle args = new Bundle(); - args.putParcelable(RESULT_RECEIVER, resultReceiver); - args.putInt(REQUEST_CODE, CAPTURE_PERMISSION_REQUEST_CODE); + public void requestStart(Activity activity, int requestCode) { + if (android.os.Build.VERSION.SDK_INT < minAPILevel) { + 
Log.w(TAG, + "Can't run requestStart() due to a low API level. API level 21 or higher is required."); + return; + } else { + MediaProjectionManager mediaProjectionManager = + (MediaProjectionManager) activity.getSystemService( + Context.MEDIA_PROJECTION_SERVICE); + + // call for the projection manager + this.startActivityForResult( + mediaProjectionManager.createScreenCaptureIntent(), requestCode); + } + } - ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); - fragment.setArguments(args); - FragmentTransaction transaction - = activity.getFragmentManager().beginTransaction().add( - fragment, - fragment.getClass().getName()); + @Override + public void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + resultCode = resultCode; + String[] permissions; + if (resultCode != Activity.RESULT_OK) { + finish(); + Bundle resultData = new Bundle(); + resultData.putString(PERMISSIONS, PERMISSION_SCREEN); + resultData.putInt(GRANT_RESULTS, resultCode); + resultReceiver.send(requestCode, resultData); + return; + } + Bundle resultData = new Bundle(); + resultData.putString(PERMISSIONS, PERMISSION_SCREEN); + resultData.putInt(GRANT_RESULTS, resultCode); + resultData.putParcelable(PROJECTION_DATA, data); + resultReceiver.send(requestCode, resultData); + finish(); + } - try { - transaction.commit(); - } catch (IllegalStateException ise) { + private void finish() { + Activity activity = getActivity(); + if (activity != null) { + activity.getFragmentManager().beginTransaction() + .remove(this) + .commitAllowingStateLoss(); + } + } + @Override + public void onResume() { + super.onResume(); + checkSelfPermissions(/* requestPermissions */ true); + } + } + + GetUserMediaImpl(StateProvider stateProvider, Context applicationContext) { + this.stateProvider = stateProvider; + this.applicationContext = applicationContext; + } + + /** + * Includes default constraints set for the audio media 
type. + * + * @param audioConstraints MediaConstraints instance to be filled with the default + * constraints for audio media type. + */ + private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("echoCancellation", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair( + "googDAEchoCancellation", "true")); + } + + /** + * Create video capturer via given facing mode + * + * @param enumerator a CameraEnumerator provided by webrtc it can be Camera1Enumerator or + * Camera2Enumerator + * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' + * is false + * @param sourceId (String) use this sourceId and ignore facing mode if specified. + * @return VideoCapturer can invoke with startCapture/stopCapture + * null if not matched camera with specified facing mode. 
+ */ + private VideoCapturer createVideoCapturer( + CameraEnumerator enumerator, + boolean isFacing, + String sourceId) { + VideoCapturer videoCapturer = null; + + // if sourceId given, use specified sourceId first + final String[] deviceNames = enumerator.getDeviceNames(); + if (sourceId != null) { + for (String name : deviceNames) { + if (name.equals(sourceId)) { + videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + if (videoCapturer != null) { + Log.d(TAG, "create user specified camera " + name + " succeeded"); + return videoCapturer; + } else { + Log.d(TAG, "create user specified camera " + name + " failed"); + break; // fallback to facing mode + } } + } } - public static class ScreenRequestPermissionsFragment extends Fragment { - - private ResultReceiver resultReceiver = null; - private int requestCode = 0; - private int resultCode = 0; - - private void checkSelfPermissions(boolean requestPermissions) { - if(resultCode != Activity.RESULT_OK) { - Activity activity = this.getActivity(); - Bundle args = getArguments(); - resultReceiver = args.getParcelable(RESULT_RECEIVER); - requestCode = args.getInt(REQUEST_CODE); - requestStart(activity, requestCode); - } + // otherwise, use facing mode + String facingStr = isFacing ? "front" : "back"; + for (String name : deviceNames) { + if (enumerator.isFrontFacing(name) == isFacing) { + videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + if (videoCapturer != null) { + Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); + return videoCapturer; + } else { + Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); } - - public void requestStart(Activity activity, int requestCode) { - if (android.os.Build.VERSION.SDK_INT < minAPILevel) { - Log.w(TAG, "Can't run requestStart() due to a low API level. 
API level 21 or higher is required."); - return; - } else { - MediaProjectionManager mediaProjectionManager = - (MediaProjectionManager) activity.getSystemService( - Context.MEDIA_PROJECTION_SERVICE); - - // call for the projection manager - this.startActivityForResult( - mediaProjectionManager.createScreenCaptureIntent(), requestCode); - } + } + } + // should we fallback to available camera automatically? + return videoCapturer; + } + + /** + * Retrieves "facingMode" constraint value. + * + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument. + * @return String value of "facingMode" constraints in "GUM" or + * null if not specified. + */ + private String getFacingMode(ConstraintsMap mediaConstraints) { + return + mediaConstraints == null + ? null + : mediaConstraints.getString("facingMode"); + } + + /** + * Retrieves "sourceId" constraint value. + * + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument + * @return String value of "sourceId" optional "GUM" constraint or + * null if not specified. 
+ */ + private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { + if (mediaConstraints != null + && mediaConstraints.hasKey("optional") + && mediaConstraints.getType("optional") == ObjectType.Array) { + ConstraintsArray optional = mediaConstraints.getArray("optional"); + + for (int i = 0, size = optional.size(); i < size; i++) { + if (optional.getType(i) == ObjectType.Map) { + ConstraintsMap option = optional.getMap(i); + + if (option.hasKey("sourceId") + && option.getType("sourceId") + == ObjectType.String) { + return option.getString("sourceId"); + } } + } + } + return null; + } + + private AudioTrack getUserAudio(ConstraintsMap constraints) { + MediaConstraints audioConstraints; + if (constraints.getType("audio") == ObjectType.Boolean) { + audioConstraints = new MediaConstraints(); + addDefaultAudioConstraints(audioConstraints); + } else { + audioConstraints + = MediaConstraintsUtils.parseMediaConstraints( + constraints.getMap("audio")); + } - @Override - public void onActivityResult(int requestCode, int resultCode, Intent data) { - super.onActivityResult(requestCode, resultCode, data); - resultCode = resultCode; - String[] permissions; - if (resultCode != Activity.RESULT_OK) { - finish(); - Bundle resultData = new Bundle(); - resultData.putString(PERMISSIONS, PERMISSION_SCREEN); - resultData.putInt(GRANT_RESULTS, resultCode); - resultReceiver.send(requestCode, resultData); - return; - } - Bundle resultData = new Bundle(); - resultData.putString(PERMISSIONS, PERMISSION_SCREEN); - resultData.putInt(GRANT_RESULTS, resultCode); - resultData.putParcelable(PROJECTION_DATA, data); - resultReceiver.send(requestCode, resultData); - finish(); - } + Log.i(TAG, "getUserMedia(audio): " + audioConstraints); + + String trackId = stateProvider.getNextTrackUUID(); + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); + + return pcFactory.createAudioTrack(trackId, 
audioSource); + } + + /** + * Implements {@code getUserMedia} without knowledge whether the necessary permissions have + * already been granted. If the necessary permissions have not been granted yet, they will be + * requested. + */ + void getUserMedia( + final ConstraintsMap constraints, + final Result result, + final MediaStream mediaStream) { + + // TODO: change getUserMedia constraints format to support new syntax + // constraint format seems changed, and there is no mandatory any more. + // and has a new syntax/attrs to specify resolution + // should change `parseConstraints()` according + // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints + + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; + + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") + == ObjectType.Map) { + videoConstraintsMandatory + = videoConstraintsMap.getMap("mandatory"); + } + } - private void finish() { - Activity activity = getActivity(); - if (activity != null) { - activity.getFragmentManager().beginTransaction() - .remove(this) - .commitAllowingStateLoss(); - } - } + final ArrayList requestPermissions = new ArrayList<>(); + + if (constraints.hasKey("audio")) { + switch (constraints.getType("audio")) { + case Boolean: + if (constraints.getBoolean("audio")) { + requestPermissions.add(PERMISSION_AUDIO); + } + break; + case Map: + requestPermissions.add(PERMISSION_AUDIO); + break; + default: + break; + } + } - @Override - public void onResume() { - super.onResume(); - checkSelfPermissions(/* requestPermissions */ true); - } + if (constraints.hasKey("video")) { + switch (constraints.getType("video")) { + case Boolean: + if (constraints.getBoolean("video")) { + requestPermissions.add(PERMISSION_VIDEO); + } + break; + case Map: + requestPermissions.add(PERMISSION_VIDEO); + 
break; + default: + break; + } } - GetUserMediaImpl( - FlutterWebRTCPlugin plugin, - Context applicationContext) { - this.plugin = plugin; - this.applicationContext = applicationContext; - } - - /** - * Includes default constraints set for the audio media type. - * @param audioConstraints MediaConstraints instance to be filled - * with the default constraints for audio media type. - */ - private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("echoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair( - "googDAEchoCancellation", "true")); - } - - /** - * Create video capturer via given facing mode - * @param enumerator a CameraEnumerator provided by webrtc - * it can be Camera1Enumerator or Camera2Enumerator - * @param isFacing 'user' mapped with 'front' is true (default) - * 'environment' mapped with 'back' is false - * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture - * null if not matched camera with specified facing mode. - */ - private VideoCapturer createVideoCapturer( - CameraEnumerator enumerator, - boolean isFacing, - String sourceId) { - VideoCapturer videoCapturer = null; + // According to step 2 of the getUserMedia() algorithm, + // requestedMediaTypes is the set of media types in constraints with + // either a dictionary value or a value of "true". + // According to step 3 of the getUserMedia() algorithm, if + // requestedMediaTypes is the empty set, the method invocation fails + // with a TypeError. 
+ if (requestPermissions.isEmpty()) { + result.error( + "TypeError", + "constraints requests no media types", null); + return; + } - // if sourceId given, use specified sourceId first - final String[] deviceNames = enumerator.getDeviceNames(); - if (sourceId != null) { - for (String name : deviceNames) { - if (name.equals(sourceId)) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); - if (videoCapturer != null) { - Log.d(TAG, "create user specified camera " + name + " succeeded"); - return videoCapturer; - } else { - Log.d(TAG, "create user specified camera " + name + " failed"); - break; // fallback to facing mode - } - } - } + requestPermissions( + requestPermissions, + /* successCallback */ new Callback() { + @Override + public void invoke(Object... args) { + List grantedPermissions = (List) args[0]; + + getUserMedia( + constraints, + result, + mediaStream, + grantedPermissions); + } + }, + /* errorCallback */ new Callback() { + @Override + public void invoke(Object... args) { + // According to step 10 Permission Failure of the + // getUserMedia() algorithm, if the user has denied + // permission, fail "with a new DOMException object whose + // name attribute has the value NotAllowedError." + result.error("DOMException", "NotAllowedError", null); + } } + ); + } + + void getDisplayMedia( + final ConstraintsMap constraints, + final Result result, + final MediaStream mediaStream) { + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; + + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") + == ObjectType.Map) { + videoConstraintsMandatory + = videoConstraintsMap.getMap("mandatory"); + } + } - // otherwise, use facing mode - String facingStr = isFacing ? 
"front" : "back"; - for (String name : deviceNames) { - if (enumerator.isFrontFacing(name) == isFacing) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); - if (videoCapturer != null) { - Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); - return videoCapturer; - } else { - Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); - } - } - } - // should we fallback to available camera automatically? - return videoCapturer; - } - - /** - * Retrieves "facingMode" constraint value. - * - * @param mediaConstraints a ConstraintsMap which represents "GUM" - * constraints argument. - * @return String value of "facingMode" constraints in "GUM" or - * null if not specified. - */ - private String getFacingMode(ConstraintsMap mediaConstraints) { - return - mediaConstraints == null - ? null - : mediaConstraints.getString("facingMode"); - } - - /** - * Retrieves "sourceId" constraint value. - * - * @param mediaConstraints a ConstraintsMap which represents "GUM" - * constraints argument - * @return String value of "sourceId" optional "GUM" constraint or - * null if not specified. 
- */ - private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { - if (mediaConstraints != null - && mediaConstraints.hasKey("optional") - && mediaConstraints.getType("optional") == ObjectType.Array) { - ConstraintsArray optional = mediaConstraints.getArray("optional"); - - for (int i = 0, size = optional.size(); i < size; i++) { - if (optional.getType(i) == ObjectType.Map) { - ConstraintsMap option = optional.getMap(i); - - if (option.hasKey("sourceId") - && option.getType("sourceId") - == ObjectType.String) { - return option.getString("sourceId"); - } - } - } - } + final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; - return null; - } + screenRequestPremissions(new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult( + int requestCode, + Bundle resultData) { - private AudioTrack getUserAudio(ConstraintsMap constraints) { - MediaConstraints audioConstraints; - if (constraints.getType("audio") == ObjectType.Boolean) { - audioConstraints = new MediaConstraints(); - addDefaultAudioConstraints(audioConstraints); - } else { - audioConstraints - = plugin.parseMediaConstraints( - constraints.getMap("audio")); - } + /* Create ScreenCapture */ + int resultCode = resultData.getInt(GRANT_RESULTS); + Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); - Log.i(TAG, "getUserMedia(audio): " + audioConstraints); - - String trackId = plugin.getNextTrackUUID(); - PeerConnectionFactory pcFactory = plugin.mFactory; - AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); - - return pcFactory.createAudioTrack(trackId, audioSource); - } - - /** - * Implements {@code getUserMedia} without knowledge whether the necessary - * permissions have already been granted. If the necessary permissions have - * not been granted yet, they will be requested. 
- */ - void getUserMedia( - final ConstraintsMap constraints, - final Result result, - final MediaStream mediaStream) { - - // TODO: change getUserMedia constraints format to support new syntax - // constraint format seems changed, and there is no mandatory any more. - // and has a new syntax/attrs to specify resolution - // should change `parseConstraints()` according - // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints - - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); - } + if (resultCode != Activity.RESULT_OK) { + result.error(null, "User didn't give permission to capture the screen.", null); + return; } - final ArrayList requestPermissions = new ArrayList<>(); - - if (constraints.hasKey("audio")) { - switch (constraints.getType("audio")) { - case Boolean: - if (constraints.getBoolean("audio")) { - requestPermissions.add(PERMISSION_AUDIO); - } - break; - case Map: - requestPermissions.add(PERMISSION_AUDIO); - break; - default: - break; - } + MediaStreamTrack[] tracks = new MediaStreamTrack[1]; + VideoCapturer videoCapturer = null; + videoCapturer = new ScreenCapturerAndroid(mediaProjectionData, + new MediaProjection.Callback() { + @Override + public void onStop() { + Log.e(TAG, "User revoked permission to capture the screen."); + result.error(null, "User revoked permission to capture the screen.", null); + } + }); + if (videoCapturer == null) { + result.error( + /* type */ "GetDisplayMediaFailed", + "Failed to create new VideoCapturer!", null); + return; } - if (constraints.hasKey("video")) { - switch (constraints.getType("video")) { - case Boolean: - if (constraints.getBoolean("video")) { - 
requestPermissions.add(PERMISSION_VIDEO); - } - break; - case Map: - requestPermissions.add(PERMISSION_VIDEO); - break; - default: - break; - } - } + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(true); - // According to step 2 of the getUserMedia() algorithm, - // requestedMediaTypes is the set of media types in constraints with - // either a dictionary value or a value of "true". - // According to step 3 of the getUserMedia() algorithm, if - // requestedMediaTypes is the empty set, the method invocation fails - // with a TypeError. - if (requestPermissions.isEmpty()) { - result.error( - "TypeError", - "constraints requests no media types", null); - return; - } + String threadName = Thread.currentThread().getName(); + SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper + .create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize(surfaceTextureHelper, applicationContext, + videoSource.getCapturerObserver()); - requestPermissions( - requestPermissions, - /* successCallback */ new Callback() { - @Override - public void invoke(Object... args) { - List grantedPermissions = (List) args[0]; - - getUserMedia( - constraints, - result, - mediaStream, - grantedPermissions); - } - }, - /* errorCallback */ new Callback() { - @Override - public void invoke(Object... args) { - // According to step 10 Permission Failure of the - // getUserMedia() algorithm, if the user has denied - // permission, fail "with a new DOMException object whose - // name attribute has the value NotAllowedError." 
- result.error("DOMException", "NotAllowedError", null); - } - } - ); - } - - void getDisplayMedia( - final ConstraintsMap constraints, - final Result result, - final MediaStream mediaStream) { - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); - } - } + WindowManager wm = (WindowManager) applicationContext + .getSystemService(Context.WINDOW_SERVICE); - final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; - screenRequestPremissions(new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult( - int requestCode, - Bundle resultData) { - - /* Create ScreenCapture */ - int resultCode = resultData.getInt(GRANT_RESULTS); - Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); - - if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); - return; - } - - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; - VideoCapturer videoCapturer = null; - videoCapturer = new ScreenCapturerAndroid(mediaProjectionData, new MediaProjection.Callback() { - @Override - public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); - } - }); - if (videoCapturer == null) { - result.error( - /* type */ "GetDisplayMediaFailed", - "Failed to create new VideoCapturer!", null); - return; - } - - PeerConnectionFactory pcFactory = plugin.mFactory; - VideoSource videoSource = pcFactory.createVideoSource(true); - - Context context = plugin.getContext(); - String threadName = Thread.currentThread().getName(); - 
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize(surfaceTextureHelper, context, videoSource.getCapturerObserver()); - - WindowManager wm = (WindowManager) applicationContext - .getSystemService(Context.WINDOW_SERVICE); - - int width = wm.getDefaultDisplay().getWidth(); - int height = wm.getDefaultDisplay().getHeight(); - int fps = DEFAULT_FPS; - - videoCapturer.startCapture(width, height, fps); - Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); - - String trackId = plugin.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); - - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); - - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap successResult = new ConstraintsMap(); - - for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - plugin.localTracks.put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } - } - - String streamId = mediaStream.getId(); - - Log.d(TAG, "MediaStream id: " + streamId); - plugin.localStreams.put(streamId, mediaStream); - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", videoTracks.toArrayList()); - 
result.success(successResult.toMap()); - } - }); - } - - /** - * Implements {@code getUserMedia} with the knowledge that the necessary - * permissions have already been granted. If the necessary permissions have - * not been granted yet, they will NOT be requested. - */ - private void getUserMedia( - ConstraintsMap constraints, - Result result, - MediaStream mediaStream, - List grantedPermissions) { - MediaStreamTrack[] tracks = new MediaStreamTrack[2]; - - // If we fail to create either, destroy the other one and fail. - if ((grantedPermissions.contains(PERMISSION_AUDIO) - && (tracks[0] = getUserAudio(constraints)) == null) - || (grantedPermissions.contains(PERMISSION_VIDEO) - && (tracks[1] = getUserVideo(constraints)) == null)) { - for (MediaStreamTrack track : tracks) { - if (track != null) { - track.dispose(); - } - } - - // XXX The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. 
- result.error( - /* type */ "GetUserMediaFailed", - "Failed to create new track", null); - return; - } + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + int fps = DEFAULT_FPS; + + videoCapturer.startCapture(width, height, fps); + Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, videoCapturer); + + tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); ConstraintsMap successResult = new ConstraintsMap(); for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - plugin.localTracks.put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } + if (track == null) { + continue; + } + + String id = track.id(); + + if (track instanceof AudioTrack) { + mediaStream.addTrack((AudioTrack) track); + } else { + mediaStream.addTrack((VideoTrack) track); + } + stateProvider.getLocalTracks().put(id, track); + + ConstraintsMap track_ = new ConstraintsMap(); + String kind = track.kind(); + + track_.putBoolean("enabled", track.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", track.state().toString()); + 
track_.putBoolean("remote", false); + + if (track instanceof AudioTrack) { + audioTracks.pushMap(track_); + } else { + videoTracks.pushMap(track_); + } } String streamId = mediaStream.getId(); Log.d(TAG, "MediaStream id: " + streamId); - plugin.localStreams.put(streamId, mediaStream); - + stateProvider.getLocalStreams().put(streamId, mediaStream); successResult.putString("streamId", streamId); successResult.putArray("audioTracks", audioTracks.toArrayList()); successResult.putArray("videoTracks", videoTracks.toArrayList()); result.success(successResult.toMap()); - } - - - private VideoTrack getUserVideo(ConstraintsMap constraints) { - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); - } + } + }); + } + + /** + * Implements {@code getUserMedia} with the knowledge that the necessary permissions have already + * been granted. If the necessary permissions have not been granted yet, they will NOT be + * requested. + */ + private void getUserMedia( + ConstraintsMap constraints, + Result result, + MediaStream mediaStream, + List grantedPermissions) { + MediaStreamTrack[] tracks = new MediaStreamTrack[2]; + + // If we fail to create either, destroy the other one and fail. 
+ if ((grantedPermissions.contains(PERMISSION_AUDIO) + && (tracks[0] = getUserAudio(constraints)) == null) + || (grantedPermissions.contains(PERMISSION_VIDEO) + && (tracks[1] = getUserVideo(constraints)) == null)) { + for (MediaStreamTrack track : tracks) { + if (track != null) { + track.dispose(); } + } + + // XXX The following does not follow the getUserMedia() algorithm + // specified by + // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia + // with respect to distinguishing the various causes of failure. + result.error( + /* type */ "GetUserMediaFailed", + "Failed to create new track", null); + return; + } - Log.i(TAG, "getUserMedia(video): " + videoConstraintsMap); - - // NOTE: to support Camera2, the device should: - // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP - // 2. all camera support level should greater than LEGACY - // see: https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL - // TODO Enable camera2 enumerator - Context context = plugin.getContext(); - CameraEnumerator cameraEnumerator; - - if (Camera2Enumerator.isSupported(context)) { - Log.d(TAG, "Creating video capturer using Camera2 API."); - cameraEnumerator = new Camera2Enumerator(context); - } else { - Log.d(TAG, "Creating video capturer using Camera1 API."); - cameraEnumerator = new Camera1Enumerator(false); - } - - String facingMode = getFacingMode(videoConstraintsMap); - boolean isFacing - = facingMode == null || !facingMode.equals("environment"); - String sourceId = getSourceIdConstraint(videoConstraintsMap); - - VideoCapturer videoCapturer - = createVideoCapturer(cameraEnumerator, isFacing, sourceId); - - if (videoCapturer == null) { - return null; - } + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); + + for (MediaStreamTrack track : tracks) { + if (track == null) 
{ + continue; + } + + String id = track.id(); + + if (track instanceof AudioTrack) { + mediaStream.addTrack((AudioTrack) track); + } else { + mediaStream.addTrack((VideoTrack) track); + } + stateProvider.getLocalTracks().put(id, track); + + ConstraintsMap track_ = new ConstraintsMap(); + String kind = track.kind(); + + track_.putBoolean("enabled", track.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", track.state().toString()); + track_.putBoolean("remote", false); + + if (track instanceof AudioTrack) { + audioTracks.pushMap(track_); + } else { + videoTracks.pushMap(track_); + } + } - PeerConnectionFactory pcFactory = plugin.mFactory; - VideoSource videoSource = pcFactory.createVideoSource(false); - String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize(surfaceTextureHelper, context, videoSource.getCapturerObserver()); - - // Fall back to defaults if keys are missing. - int width - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") - ? videoConstraintsMandatory.getInt("minWidth") - : DEFAULT_WIDTH; - int height - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") - ? videoConstraintsMandatory.getInt("minHeight") - : DEFAULT_HEIGHT; - int fps - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") - ? 
videoConstraintsMandatory.getInt("minFrameRate") - : DEFAULT_FPS; + String streamId = mediaStream.getId(); + + Log.d(TAG, "MediaStream id: " + streamId); + stateProvider.getLocalStreams().put(streamId, mediaStream); + + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); + } + + + private VideoTrack getUserVideo(ConstraintsMap constraints) { + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") + == ObjectType.Map) { + videoConstraintsMandatory + = videoConstraintsMap.getMap("mandatory"); + } + } - videoCapturer.startCapture(width, height, fps); + Log.i(TAG, "getUserMedia(video): " + videoConstraintsMap); + + // NOTE: to support Camera2, the device should: + // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + // 2. 
all camera support level should greater than LEGACY + // see: https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL + // TODO Enable camera2 enumerator + CameraEnumerator cameraEnumerator; + + if (Camera2Enumerator.isSupported(applicationContext)) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(applicationContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } - String trackId = plugin.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + String facingMode = getFacingMode(videoConstraintsMap); + boolean isFacing + = facingMode == null || !facingMode.equals("environment"); + String sourceId = getSourceIdConstraint(videoConstraintsMap); - Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); - videoSource.adaptOutputFormat(width, height, fps); + VideoCapturer videoCapturer + = createVideoCapturer(cameraEnumerator, isFacing, sourceId); - return pcFactory.createVideoTrack(trackId, videoSource); + if (videoCapturer == null) { + return null; } - void removeVideoCapturer(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { - try { - videoCapturer.stopCapture(); - } catch (InterruptedException e) { - Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); - } finally { - videoCapturer.dispose(); - mVideoCapturers.remove(id); - } - } + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(false); + String threadName = Thread.currentThread().getName(); + SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper + .create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer + .initialize(surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + + // Fall back to 
defaults if keys are missing. + int width + = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") + ? videoConstraintsMandatory.getInt("minWidth") + : DEFAULT_WIDTH; + int height + = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") + ? videoConstraintsMandatory.getInt("minHeight") + : DEFAULT_HEIGHT; + int fps + = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") + ? videoConstraintsMandatory.getInt("minFrameRate") + : DEFAULT_FPS; + + videoCapturer.startCapture(width, height, fps); + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, videoCapturer); + + Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); + videoSource.adaptOutputFormat(width, height, fps); + + return pcFactory.createVideoTrack(trackId, videoSource); + } + + void removeVideoCapturer(String id) { + VideoCapturer videoCapturer = mVideoCapturers.get(id); + if (videoCapturer != null) { + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); + } finally { + videoCapturer.dispose(); + mVideoCapturers.remove(id); + } } + } + + private void requestPermissions( + final ArrayList permissions, + final Callback successCallback, + final Callback errorCallback) { + PermissionUtils.Callback callback = (permissions_, grantResults) -> { + List grantedPermissions = new ArrayList<>(); + List deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < permissions_.length; ++i) { + String permission = permissions_[i]; + int grantResult = grantResults[i]; + + if (grantResult == PackageManager.PERMISSION_GRANTED) { + grantedPermissions.add(permission); + } else { + deniedPermissions.add(permission); + } + } + + // Success means that all requested permissions were granted. 
+ for (String p : permissions) { + if (!grantedPermissions.contains(p)) { + // According to step 6 of the getUserMedia() algorithm + // "if the result is denied, jump to the step Permission + // Failure." + errorCallback.invoke(deniedPermissions); + return; + } + } + successCallback.invoke(grantedPermissions); + }; - private void requestPermissions( - final ArrayList permissions, - final Callback successCallback, - final Callback errorCallback) { - PermissionUtils.Callback callback = new PermissionUtils.Callback() { - @Override - public void invoke(String[] permissions_, int[] grantResults) { - List grantedPermissions = new ArrayList<>(); - List deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < permissions_.length; ++i) { - String permission = permissions_[i]; - int grantResult = grantResults[i]; - - if (grantResult == PackageManager.PERMISSION_GRANTED) { - grantedPermissions.add(permission); - } else { - deniedPermissions.add(permission); - } - } - - // Success means that all requested permissions were granted. - for (String p : permissions) { - if (!grantedPermissions.contains(p)) { - // According to step 6 of the getUserMedia() algorithm - // "if the result is denied, jump to the step Permission - // Failure." 
- errorCallback.invoke(deniedPermissions); - return; - } - } - successCallback.invoke(grantedPermissions); - } - }; - + if (VERSION.SDK_INT >= VERSION_CODES.M) { + final Activity activity = stateProvider.getActivity(); + if (activity != null) { PermissionUtils.requestPermissions( - plugin, + activity, permissions.toArray(new String[permissions.size()]), callback); + } } + } - void switchCamera(String id, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + id, null); - return; - } - - CameraVideoCapturer cameraVideoCapturer - = (CameraVideoCapturer) videoCapturer; - cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { - @Override - public void onCameraSwitchDone(boolean b) { - result.success(b); - } - @Override - public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); - } - }); - } - - /** Creates and starts recording of local stream to file - * @param path to the file for record - * @param videoTrack to record or null if only audio needed - * @param audioChannel channel for recording or null - * @throws Exception lot of different exceptions, pass back to dart layer to print them at least - * **/ - void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) throws Exception { - AudioSamplesInterceptor interceptor = null; - if (audioChannel == AudioChannel.INPUT) - interceptor = inputSamplesInterceptor; - else if (audioChannel == AudioChannel.OUTPUT) { - if (outputSamplesInterceptor == null) - outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule); - interceptor = outputSamplesInterceptor; - } - MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor); - mediaRecorder.startRecording(new File(path)); - mediaRecorders.append(id, mediaRecorder); - } - - void stopRecording(Integer 
id) { - MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); - if (mediaRecorder != null) { - mediaRecorder.stopRecording(); - mediaRecorders.remove(id); - File file = mediaRecorder.getRecordFile(); - if (file != null) { - ContentValues values = new ContentValues(3); - values.put(MediaStore.Video.Media.TITLE, file.getName()); - values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); - values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); - applicationContext.getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); - } - } + void switchCamera(String id, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(id); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + id, null); + return; } - void hasTorch(String trackId, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; - } - - if (videoCapturer instanceof Camera2Capturer) { - CameraManager manager; - CameraDevice cameraDevice; - - try { - Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - boolean flashIsAvailable; - try { - CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId()); - flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); - } catch 
(CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(flashIsAvailable); - return; - } - - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; - - try { - Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } + CameraVideoCapturer cameraVideoCapturer + = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + result.success(b); + } + + @Override + public void onCameraSwitchError(String s) { + result.error("Switching camera failed", s, null); + } + }); + } + + /** + * Creates and starts recording of local stream to file + * + * @param path to the file for record + * @param videoTrack to record or null if only audio needed + * @param audioChannel channel for recording or null + * @throws Exception lot of different exceptions, pass back to dart layer to print them at least + **/ + void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, + @Nullable AudioChannel audioChannel) throws Exception { + AudioSamplesInterceptor interceptor = null; + if (audioChannel == AudioChannel.INPUT) { + interceptor = inputSamplesInterceptor; + } else if (audioChannel == AudioChannel.OUTPUT) { + if (outputSamplesInterceptor == null) { + outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule); + } + interceptor = outputSamplesInterceptor; + } + MediaRecorderImpl mediaRecorder = 
new MediaRecorderImpl(id, videoTrack, interceptor); + mediaRecorder.startRecording(new File(path)); + mediaRecorders.append(id, mediaRecorder); + } + + void stopRecording(Integer id) { + MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); + if (mediaRecorder != null) { + mediaRecorder.stopRecording(); + mediaRecorders.remove(id); + File file = mediaRecorder.getRecordFile(); + if (file != null) { + ContentValues values = new ContentValues(3); + values.put(MediaStore.Video.Media.TITLE, file.getName()); + values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); + values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); + applicationContext.getContentResolver() + .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); + } + } + } - Camera.Parameters params = camera.getParameters(); - List supportedModes = params.getSupportedFlashModes(); + void hasTorch(String trackId, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(trackId); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + trackId, null); + return; + } - result.success((supportedModes == null) ? 
false : supportedModes.contains(Camera.Parameters.FLASH_MODE_TORCH)); - return; - } + if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && videoCapturer instanceof Camera2Capturer) { + CameraManager manager; + CameraDevice cameraDevice; + + try { + Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, + "currentSession"); + manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, videoCapturer, + "cameraManager"); + cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, + "cameraDevice"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + boolean flashIsAvailable; + try { + CameraCharacteristics characteristics = manager + .getCameraCharacteristics(cameraDevice.getId()); + flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(flashIsAvailable); + return; + } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + if (videoCapturer instanceof Camera1Capturer) { + Camera camera; + + try { + Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, + "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + Parameters params = 
camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); + + result.success((supportedModes == null) ? false + : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); + return; } - void setTorch(String trackId, boolean torch, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; - } + Log.e(TAG, "[TORCH] Video capturer not compatible"); + result.error(null, "Video capturer not compatible", null); + } - if (videoCapturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - - try { - Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - CameraManager manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - captureSession = (CameraCaptureSession) getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = 
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set(CaptureRequest.FLASH_MODE, torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range(captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest(captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(null); - return; - } + @RequiresApi(api = VERSION_CODES.LOLLIPOP) + void setTorch(String trackId, boolean torch, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(trackId); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + trackId, null); + return; + } - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode(torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - camera.setParameters(params); - - result.success(null); - return; - } + if (videoCapturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, + "currentSession"); + CameraManager manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, + videoCapturer, "cameraManager"); + captureSession = (CameraCaptureSession) getPrivateProperty(session.getClass(), session, + "captureSession"); + cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, + "cameraDevice"); + captureFormat = (CaptureFormat) getPrivateProperty(session.getClass(), session, + "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = (Handler) getPrivateProperty(session.getClass(), session, + "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = cameraDevice + .createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set(CaptureRequest.FLASH_MODE, + torch ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>(captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession + .setRepeatingRequest(captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + return; + } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + if (videoCapturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, + "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + return; } - private Object getPrivateProperty (Class klass, Object object, String fieldName) throws NoSuchFieldWithNameException { - try { - Field field = klass.getDeclaredField(fieldName); - field.setAccessible(true); - return field.get(object); - } catch (NoSuchFieldException e) { - throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); - } catch (IllegalAccessException e) { - // Should never happen since we are calling `setAccessible(true)` - throw new RuntimeException(e); - } + Log.e(TAG, "[TORCH] Video capturer not compatible"); + result.error(null, "Video capturer not compatible", null); + } + + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); } + } - private class NoSuchFieldWithNameException extends NoSuchFieldException { - String className; - String fieldName; + private class NoSuchFieldWithNameException extends NoSuchFieldException { - NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { - super(e.getMessage()); - this.className = className; - this.fieldName = fieldName; - } + String className; + String fieldName; + + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java 
b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java new file mode 100644 index 0000000000..8a207fbfc1 --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -0,0 +1,1400 @@ +package com.cloudwebrtc.webrtc; + +import static com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; + +import android.app.Activity; +import android.content.Context; +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.hardware.Camera.CameraInfo; +import android.util.Log; +import android.util.LongSparseArray; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import com.cloudwebrtc.webrtc.record.AudioChannel; +import com.cloudwebrtc.webrtc.record.FrameCapturer; +import com.cloudwebrtc.webrtc.utils.AnyThreadResult; +import com.cloudwebrtc.webrtc.utils.ConstraintsArray; +import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import com.cloudwebrtc.webrtc.utils.EglUtils; +import com.cloudwebrtc.webrtc.utils.ObjectType; +import com.cloudwebrtc.webrtc.utils.RTCAudioManager; +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; +import io.flutter.view.TextureRegistry; +import io.flutter.view.TextureRegistry.SurfaceTextureEntry; +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.UUID; +import org.webrtc.AudioTrack; +import org.webrtc.DefaultVideoDecoderFactory; +import org.webrtc.DefaultVideoEncoderFactory; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import org.webrtc.Logging; +import org.webrtc.MediaConstraints; +import 
org.webrtc.MediaConstraints.KeyValuePair; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnection; +import org.webrtc.PeerConnection.BundlePolicy; +import org.webrtc.PeerConnection.CandidateNetworkPolicy; +import org.webrtc.PeerConnection.ContinualGatheringPolicy; +import org.webrtc.PeerConnection.IceServer; +import org.webrtc.PeerConnection.IceServer.Builder; +import org.webrtc.PeerConnection.IceTransportsType; +import org.webrtc.PeerConnection.KeyType; +import org.webrtc.PeerConnection.RTCConfiguration; +import org.webrtc.PeerConnection.RtcpMuxPolicy; +import org.webrtc.PeerConnection.SdpSemantics; +import org.webrtc.PeerConnection.TcpCandidatePolicy; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.PeerConnectionFactory.InitializationOptions; +import org.webrtc.PeerConnectionFactory.Options; +import org.webrtc.SdpObserver; +import org.webrtc.SessionDescription; +import org.webrtc.SessionDescription.Type; +import org.webrtc.VideoTrack; +import org.webrtc.audio.AudioDeviceModule; +import org.webrtc.audio.JavaAudioDeviceModule; + +public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { + + + interface AudioManager { + + void onAudioManagerRequested(boolean requested); + + void setMicrophoneMute(boolean mute); + + void setSpeakerphoneOn(boolean on); + + + } + + static public final String TAG = "FlutterWebRTCPlugin"; + + private final Map mPeerConnectionObservers = new HashMap<>(); + private BinaryMessenger messenger; + private Context context; + private final TextureRegistry textures; + + private PeerConnectionFactory mFactory; + + private final Map localStreams = new HashMap<>(); + private final Map localTracks = new HashMap<>(); + + private LongSparseArray renders = new LongSparseArray<>(); + + /** + * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce + * complexity and to (somewhat) separate concerns. 
+ */ + private GetUserMediaImpl getUserMediaImpl; + + private final AudioManager audioManager; + + private AudioDeviceModule audioDeviceModule; + + private Activity activity; + + MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry, + @NonNull AudioManager audioManager) { + this.context = context; + this.textures = textureRegistry; + this.messenger = messenger; + this.audioManager = audioManager; + } + + private void ensureInitialized() { + if (mFactory != null) { + return; + } + + PeerConnectionFactory.initialize( + InitializationOptions.builder(context) + .setEnableInternalTracer(true) + .createInitializationOptions()); + + // Initialize EGL contexts required for HW acceleration. + EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); + + getUserMediaImpl = new GetUserMediaImpl(this, context); + + audioDeviceModule = JavaAudioDeviceModule.builder(context) + .setUseHardwareAcousticEchoCanceler(true) + .setUseHardwareNoiseSuppressor(true) + .setSamplesReadyCallback(getUserMediaImpl.inputSamplesInterceptor) + .createAudioDeviceModule(); + + getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; + + mFactory = PeerConnectionFactory.builder() + .setOptions(new Options()) + .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglContext, false, true)) + .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglContext)) + .setAudioDeviceModule(audioDeviceModule) + .createPeerConnectionFactory(); + } + + @Override + public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { + ensureInitialized(); + + final AnyThreadResult result = new AnyThreadResult(notSafeResult); + switch (call.method) { + case "createPeerConnection": { + Map constraints = call.argument("constraints"); + Map configuration = call.argument("configuration"); + String peerConnectionId = peerConnectionInit(new ConstraintsMap(configuration), + new ConstraintsMap((constraints))); + ConstraintsMap res = new 
ConstraintsMap(); + res.putString("peerConnectionId", peerConnectionId); + result.success(res.toMap()); + break; + } + case "getUserMedia": { + Map constraints = call.argument("constraints"); + ConstraintsMap constraintsMap = new ConstraintsMap(constraints); + getUserMedia(constraintsMap, result); + break; + } + case "createLocalMediaStream": + createLocalMediaStream(result); + break; + case "getSources": + getSources(result); + break; + case "createOffer": { + String peerConnectionId = call.argument("peerConnectionId"); + Map constraints = call.argument("constraints"); + peerConnectionCreateOffer(peerConnectionId, new ConstraintsMap(constraints), result); + break; + } + case "createAnswer": { + String peerConnectionId = call.argument("peerConnectionId"); + Map constraints = call.argument("constraints"); + peerConnectionCreateAnswer(peerConnectionId, new ConstraintsMap(constraints), result); + break; + } + case "mediaStreamGetTracks": { + String streamId = call.argument("streamId"); + MediaStream stream = getStreamForId(streamId, ""); + Map resultMap = new HashMap<>(); + List audioTracks = new ArrayList<>(); + List videoTracks = new ArrayList<>(); + for (AudioTrack track : stream.audioTracks) { + localTracks.put(track.id(), track); + Map trackMap = new HashMap<>(); + trackMap.put("enabled", track.enabled()); + trackMap.put("id", track.id()); + trackMap.put("kind", track.kind()); + trackMap.put("label", track.id()); + trackMap.put("readyState", "live"); + trackMap.put("remote", false); + audioTracks.add(trackMap); + } + for (VideoTrack track : stream.videoTracks) { + localTracks.put(track.id(), track); + Map trackMap = new HashMap<>(); + trackMap.put("enabled", track.enabled()); + trackMap.put("id", track.id()); + trackMap.put("kind", track.kind()); + trackMap.put("label", track.id()); + trackMap.put("readyState", "live"); + trackMap.put("remote", false); + videoTracks.add(trackMap); + } + resultMap.put("audioTracks", audioTracks); + resultMap.put("videoTracks", 
videoTracks); + result.success(resultMap); + break; + } + case "addStream": { + String streamId = call.argument("streamId"); + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionAddStream(streamId, peerConnectionId, result); + break; + } + case "removeStream": { + String streamId = call.argument("streamId"); + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionRemoveStream(streamId, peerConnectionId, result); + break; + } + case "setLocalDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + Map description = call.argument("description"); + peerConnectionSetLocalDescription(new ConstraintsMap(description), peerConnectionId, + result); + break; + } + case "setRemoteDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + Map description = call.argument("description"); + peerConnectionSetRemoteDescription(new ConstraintsMap(description), peerConnectionId, + result); + break; + } + case "addCandidate": { + String peerConnectionId = call.argument("peerConnectionId"); + Map candidate = call.argument("candidate"); + peerConnectionAddICECandidate(new ConstraintsMap(candidate), peerConnectionId, result); + break; + } + case "getStats": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + peerConnectionGetStats(trackId, peerConnectionId, result); + break; + } + case "createDataChannel": { + String peerConnectionId = call.argument("peerConnectionId"); + String label = call.argument("label"); + Map dataChannelDict = call.argument("dataChannelDict"); + createDataChannel(peerConnectionId, label, new ConstraintsMap(dataChannelDict), result); + break; + } + case "dataChannelSend": { + String peerConnectionId = call.argument("peerConnectionId"); + int dataChannelId = call.argument("dataChannelId"); + String type = call.argument("type"); + Boolean isBinary = type.equals("binary"); + ByteBuffer byteBuffer; + if (isBinary) { 
+ byteBuffer = ByteBuffer.wrap(call.argument("data")); + } else { + try { + String data = call.argument("data"); + byteBuffer = ByteBuffer.wrap(data.getBytes("UTF-8")); + } catch (UnsupportedEncodingException e) { + Log.d(TAG, "Could not encode text string as UTF-8."); + result.error("dataChannelSendFailed", "Could not encode text string as UTF-8.", null); + return; + } + } + dataChannelSend(peerConnectionId, dataChannelId, byteBuffer, isBinary); + result.success(null); + break; + } + case "dataChannelClose": { + String peerConnectionId = call.argument("peerConnectionId"); + int dataChannelId = call.argument("dataChannelId"); + dataChannelClose(peerConnectionId, dataChannelId); + result.success(null); + break; + } + case "streamDispose": { + String streamId = call.argument("streamId"); + mediaStreamRelease(streamId); + result.success(null); + break; + } + case "mediaStreamTrackSetEnable": { + String trackId = call.argument("trackId"); + Boolean enabled = call.argument("enabled"); + MediaStreamTrack track = getTrackForId(trackId); + if (track != null) { + track.setEnabled(enabled); + } + result.success(null); + break; + } + case "mediaStreamAddTrack": { + String streamId = call.argument("streamId"); + String trackId = call.argument("trackId"); + mediaStreamAddTrack(streamId, trackId, result); + break; + } + case "mediaStreamRemoveTrack": { + String streamId = call.argument("streamId"); + String trackId = call.argument("trackId"); + mediaStreamRemoveTrack(streamId, trackId, result); + break; + } + case "trackDispose": { + String trackId = call.argument("trackId"); + localTracks.remove(trackId); + result.success(null); + break; + } + case "peerConnectionClose": { + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionClose(peerConnectionId); + result.success(null); + break; + } + case "peerConnectionDispose": { + String peerConnectionId = call.argument("peerConnectionId"); + peerConnectionDispose(peerConnectionId); + result.success(null); + 
break; + } + case "createVideoRenderer": { + SurfaceTextureEntry entry = textures.createSurfaceTexture(); + SurfaceTexture surfaceTexture = entry.surfaceTexture(); + FlutterRTCVideoRenderer render = new FlutterRTCVideoRenderer(surfaceTexture, entry); + renders.put(entry.id(), render); + + EventChannel eventChannel = + new EventChannel( + messenger, + "FlutterWebRTC/Texture" + entry.id()); + + eventChannel.setStreamHandler(render); + render.setEventChannel(eventChannel); + render.setId((int) entry.id()); + + ConstraintsMap params = new ConstraintsMap(); + params.putInt("textureId", (int) entry.id()); + result.success(params.toMap()); + break; + } + case "videoRendererDispose": { + int textureId = call.argument("textureId"); + FlutterRTCVideoRenderer render = renders.get(textureId); + if (render == null) { + result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", + null); + return; + } + render.Dispose(); + renders.delete(textureId); + result.success(null); + break; + } + case "videoRendererSetSrcObject": { + int textureId = call.argument("textureId"); + String streamId = call.argument("streamId"); + String peerConnectionId = call.argument("ownerTag"); + FlutterRTCVideoRenderer render = renders.get(textureId); + + if (render == null) { + result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", + null); + return; + } + + MediaStream stream = getStreamForId(streamId, peerConnectionId); + render.setStream(stream); + result.success(null); + break; + } + case "mediaStreamTrackHasTorch": { + String trackId = call.argument("trackId"); + getUserMediaImpl.hasTorch(trackId, result); + break; + } + case "mediaStreamTrackSetTorch": { + String trackId = call.argument("trackId"); + boolean torch = call.argument("torch"); + getUserMediaImpl.setTorch(trackId, torch, result); + break; + } + case "mediaStreamTrackSwitchCamera": { + String trackId = call.argument("trackId"); + getUserMediaImpl.switchCamera(trackId, 
result); + break; + } + case "setVolume": { + String trackId = call.argument("trackId"); + double volume = call.argument("volume"); + mediaStreamTrackSetVolume(trackId, volume); + result.success(null); + break; + } + case "setMicrophoneMute": + boolean mute = call.argument("mute"); + audioManager.setMicrophoneMute(mute); + result.success(null); + break; + case "enableSpeakerphone": + boolean enable = call.argument("enable"); + audioManager.setSpeakerphoneOn(enable); + result.success(null); + break; + case "getDisplayMedia": { + Map constraints = call.argument("constraints"); + ConstraintsMap constraintsMap = new ConstraintsMap(constraints); + getDisplayMedia(constraintsMap, result); + break; + } + case "startRecordToFile": + //This method can a lot of different exceptions + //so we should notify plugin user about them + try { + String path = call.argument("path"); + VideoTrack videoTrack = null; + String videoTrackId = call.argument("videoTrackId"); + if (videoTrackId != null) { + MediaStreamTrack track = getTrackForId(videoTrackId); + if (track instanceof VideoTrack) { + videoTrack = (VideoTrack) track; + } + } + AudioChannel audioChannel = null; + if (call.hasArgument("audioChannel")) { + audioChannel = AudioChannel.values()[(Integer) call.argument("audioChannel")]; + } + Integer recorderId = call.argument("recorderId"); + if (videoTrack != null || audioChannel != null) { + getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); + result.success(null); + } else { + result.error("0", "No tracks", null); + } + } catch (Exception e) { + result.error("-1", e.getMessage(), e); + } + break; + case "stopRecordToFile": + Integer recorderId = call.argument("recorderId"); + getUserMediaImpl.stopRecording(recorderId); + result.success(null); + break; + case "captureFrame": + String path = call.argument("path"); + String videoTrackId = call.argument("trackId"); + if (videoTrackId != null) { + MediaStreamTrack track = getTrackForId(videoTrackId); + 
if (track instanceof VideoTrack) { + new FrameCapturer((VideoTrack) track, new File(path), result); + } else { + result.error(null, "It's not video track", null); + } + } else { + result.error(null, "Track is null", null); + } + break; + case "getLocalDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + SessionDescription sdp = peerConnection.getLocalDescription(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } else { + Log.d(TAG, "getLocalDescription() peerConnection is null"); + result.error("getLocalDescriptionFailed", "getLocalDescription() peerConnection is null", + null); + } + break; + } + case "getRemoteDescription": { + String peerConnectionId = call.argument("peerConnectionId"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + SessionDescription sdp = peerConnection.getRemoteDescription(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } else { + Log.d(TAG, "getRemoteDescription() peerConnection is null"); + result + .error("getRemoteDescriptionFailed", "getRemoteDescription() peerConnection is null", + null); + } + break; + } + case "setConfiguration": { + String peerConnectionId = call.argument("peerConnectionId"); + Map configuration = call.argument("configuration"); + PeerConnection peerConnection = getPeerConnection(peerConnectionId); + if (peerConnection != null) { + peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); + result.success(null); + } else { + Log.d(TAG, "setConfiguration() peerConnection is null"); + result.error("setConfigurationFailed", 
"setConfiguration() peerConnection is null", null);
        }
        break;
      }
      default:
        result.notImplemented();
        break;
    }
  }

  /**
   * Resolves the native {@link PeerConnection} registered under the given plugin-side id.
   * Returns {@code null} when no observer exists for the id or its connection was never set.
   */
  private PeerConnection getPeerConnection(String id) {
    PeerConnectionObserver pco = mPeerConnectionObservers.get(id);
    return (pco == null) ? null : pco.getPeerConnection();
  }

  /**
   * Converts the Dart-side "iceServers" array into native {@link IceServer} instances.
   * Supports both the legacy single "url" key and the standard "urls" key, where "urls"
   * may be either a single string or an array of strings; "username"/"credential" are
   * applied only when both are present.
   *
   * NOTE(review): generic type parameters (e.g. List&lt;IceServer&gt;) appear to have been
   * stripped from this patch in transit — confirm against the committed file.
   */
  private List createIceServers(ConstraintsArray iceServersArray) {
    final int size = (iceServersArray == null) ? 0 : iceServersArray.size();
    List iceServers = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
      ConstraintsMap iceServerMap = iceServersArray.getMap(i);
      // Credentials are only attached when both halves are supplied.
      boolean hasUsernameAndCredential =
          iceServerMap.hasKey("username") && iceServerMap.hasKey("credential");
      if (iceServerMap.hasKey("url")) {
        // Legacy single-URL form.
        if (hasUsernameAndCredential) {
          iceServers.add(IceServer.builder(iceServerMap.getString("url"))
              .setUsername(iceServerMap.getString("username"))
              .setPassword(iceServerMap.getString("credential")).createIceServer());
        } else {
          iceServers.add(
              IceServer.builder(iceServerMap.getString("url")).createIceServer());
        }
      } else if (iceServerMap.hasKey("urls")) {
        switch (iceServerMap.getType("urls")) {
          case String:
            if (hasUsernameAndCredential) {
              iceServers.add(IceServer.builder(iceServerMap.getString("urls"))
                  .setUsername(iceServerMap.getString("username"))
                  .setPassword(iceServerMap.getString("credential")).createIceServer());
            } else {
              iceServers.add(IceServer.builder(iceServerMap.getString("urls"))
                  .createIceServer());
            }
            break;
          case Array:
            // "urls" given as a list: one IceServer is built for the whole group.
            ConstraintsArray urls = iceServerMap.getArray("urls");
            List urlsList = new ArrayList<>();

            for (int j = 0; j < urls.size(); j++) {
              urlsList.add(urls.getString(j));
            }

            Builder builder = IceServer.builder(urlsList);

            if (hasUsernameAndCredential) {
              builder
                  .setUsername(iceServerMap.getString("username"))
                  .setPassword(iceServerMap.getString("credential"));
            }

            iceServers.add(builder.createIceServer());

            break;
        }
      }
    }
    return iceServers;
  }

  /**
   * Translates the Dart-side "configuration" map into a native
   * {@link RTCConfiguration}. Recognizes the public RTCConfiguration fields
   * (iceTransportPolicy, bundlePolicy, rtcpMuxPolicy, iceCandidatePoolSize,
   * sdpSemantics) plus several libwebrtc-private knobs. Unknown or absent keys
   * leave the libwebrtc defaults untouched; a null map yields a default
   * configuration with an empty ICE-server list.
   */
  private RTCConfiguration parseRTCConfiguration(ConstraintsMap map) {
    ConstraintsArray iceServersArray = null;
    if (map != null) {
      iceServersArray = map.getArray("iceServers");
    }
    List iceServers = createIceServers(iceServersArray);
    RTCConfiguration conf = new RTCConfiguration(iceServers);
    if (map == null) {
      return conf;
    }

    // iceTransportPolicy (public api)
    if (map.hasKey("iceTransportPolicy")
        && map.getType("iceTransportPolicy") == ObjectType.String) {
      final String v = map.getString("iceTransportPolicy");
      if (v != null) {
        switch (v) {
          case "all": // public
            conf.iceTransportsType = IceTransportsType.ALL;
            break;
          case "relay": // public
            conf.iceTransportsType = IceTransportsType.RELAY;
            break;
          case "nohost":
            conf.iceTransportsType = IceTransportsType.NOHOST;
            break;
          case "none":
            conf.iceTransportsType = IceTransportsType.NONE;
            break;
        }
      }
    }

    // bundlePolicy (public api)
    if (map.hasKey("bundlePolicy")
        && map.getType("bundlePolicy") == ObjectType.String) {
      final String v = map.getString("bundlePolicy");
      if (v != null) {
        switch (v) {
          case "balanced": // public
            conf.bundlePolicy = BundlePolicy.BALANCED;
            break;
          case "max-compat": // public
            conf.bundlePolicy = BundlePolicy.MAXCOMPAT;
            break;
          case "max-bundle": // public
            conf.bundlePolicy = BundlePolicy.MAXBUNDLE;
            break;
        }
      }
    }

    // rtcpMuxPolicy (public api)
    if (map.hasKey("rtcpMuxPolicy")
        && map.getType("rtcpMuxPolicy") == ObjectType.String) {
      final String v = map.getString("rtcpMuxPolicy");
      if (v != null) {
        switch (v) {
          case "negotiate": // public
            conf.rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
            break;
          case "require": // public
            conf.rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE;
            break;
        }
      }
    }

    // FIXME: peerIdentity of type DOMString (public api)
    // FIXME: certificates of type sequence (public api)

    // iceCandidatePoolSize of type unsigned short, defaulting to 0
    if (map.hasKey("iceCandidatePoolSize")
        && map.getType("iceCandidatePoolSize") == ObjectType.Number) {
      final int v = map.getInt("iceCandidatePoolSize");
      // Only positive values override the libwebrtc default of 0.
      if (v > 0) {
        conf.iceCandidatePoolSize = v;
      }
    }

    // sdpSemantics
    if (map.hasKey("sdpSemantics")
        && map.getType("sdpSemantics") == ObjectType.String) {
      final String v = map.getString("sdpSemantics");
      if (v != null) {
        switch (v) {
          case "plan-b":
            conf.sdpSemantics = SdpSemantics.PLAN_B;
            break;
          case "unified-plan":
            conf.sdpSemantics = SdpSemantics.UNIFIED_PLAN;
            break;
        }
      }
    }

    // === below is private api in webrtc ===

    // tcpCandidatePolicy (private api)
    if (map.hasKey("tcpCandidatePolicy")
        && map.getType("tcpCandidatePolicy") == ObjectType.String) {
      final String v = map.getString("tcpCandidatePolicy");
      if (v != null) {
        switch (v) {
          case "enabled":
            conf.tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
            break;
          case "disabled":
            conf.tcpCandidatePolicy = TcpCandidatePolicy.DISABLED;
            break;
        }
      }
    }

    // candidateNetworkPolicy (private api)
    if (map.hasKey("candidateNetworkPolicy")
        && map.getType("candidateNetworkPolicy") == ObjectType.String) {
      final String v = map.getString("candidateNetworkPolicy");
      if (v != null) {
        switch (v) {
          case "all":
            conf.candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
            break;
          case "low_cost":
            conf.candidateNetworkPolicy = CandidateNetworkPolicy.LOW_COST;
            break;
        }
      }
    }

    // KeyType (private api)
    if (map.hasKey("keyType")
        && map.getType("keyType") == ObjectType.String) {
      final String v = map.getString("keyType");
      if (v != null) {
        switch (v) {
          case "RSA":
            conf.keyType = KeyType.RSA;
            break;
          case "ECDSA":
            conf.keyType = KeyType.ECDSA;
            break;
        }
      }
    }

    // continualGatheringPolicy (private api)
    if (map.hasKey("continualGatheringPolicy")
        && map.getType("continualGatheringPolicy") == ObjectType.String) {
      final String v = map.getString("continualGatheringPolicy");
      if (v != null) {
        switch (v) {
          case "gather_once":
            conf.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
            break;
          case "gather_continually":
            conf.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_CONTINUALLY;
            break;
        }
      }
    }

    // audioJitterBufferMaxPackets (private api)
    if (map.hasKey("audioJitterBufferMaxPackets")
        && map.getType("audioJitterBufferMaxPackets") == ObjectType.Number) {
      final int v = map.getInt("audioJitterBufferMaxPackets");
      if (v > 0) {
        conf.audioJitterBufferMaxPackets = v;
      }
    }

    // iceConnectionReceivingTimeout (private api)
    if (map.hasKey("iceConnectionReceivingTimeout")
        && map.getType("iceConnectionReceivingTimeout") == ObjectType.Number) {
      final int v = map.getInt("iceConnectionReceivingTimeout");
      conf.iceConnectionReceivingTimeout = v;
    }

    // iceBackupCandidatePairPingInterval (private api)
    if (map.hasKey("iceBackupCandidatePairPingInterval")
        && map.getType("iceBackupCandidatePairPingInterval") == ObjectType.Number) {
      final int v = map.getInt("iceBackupCandidatePairPingInterval");
      conf.iceBackupCandidatePairPingInterval = v;
    }

    // audioJitterBufferFastAccelerate (private api)
    if (map.hasKey("audioJitterBufferFastAccelerate")
        && map.getType("audioJitterBufferFastAccelerate") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("audioJitterBufferFastAccelerate");
      conf.audioJitterBufferFastAccelerate = v;
    }

    // pruneTurnPorts (private api)
    if (map.hasKey("pruneTurnPorts")
        && map.getType("pruneTurnPorts") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("pruneTurnPorts");
      conf.pruneTurnPorts = v;
    }

    // presumeWritableWhenFullyRelayed (private api)
    if (map.hasKey("presumeWritableWhenFullyRelayed")
        && map.getType("presumeWritableWhenFullyRelayed") == ObjectType.Boolean) {
      final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed");
      conf.presumeWritableWhenFullyRelayed = v;
    }

    return conf;
  }

  public String peerConnectionInit(ConstraintsMap
configuration, ConstraintsMap constraints) {
    String peerConnectionId = getNextStreamUUID();
    PeerConnectionObserver observer = new PeerConnectionObserver(this, messenger, peerConnectionId);
    PeerConnection peerConnection
        = mFactory.createPeerConnection(
        parseRTCConfiguration(configuration),
        parseMediaConstraints(constraints),
        observer);
    observer.setPeerConnection(peerConnection);
    // The first live peer connection switches the host app's audio routing on.
    if (mPeerConnectionObservers.size() == 0) {
      audioManager.onAudioManagerRequested(true);
    }
    mPeerConnectionObservers.put(peerConnectionId, observer);
    return peerConnectionId;
  }

  // StateProvider: exposes the plugin-local stream registry to collaborators
  // (GetUserMediaImpl, PeerConnectionObserver).
  @Override
  public Map getLocalStreams() {
    return localStreams;
  }

  @Override
  public Map getLocalTracks() {
    return localTracks;
  }

  /** Generates a stream id that collides with no known local or remote stream. */
  @Override
  public String getNextStreamUUID() {
    String uuid;

    do {
      uuid = UUID.randomUUID().toString();
    } while (getStreamForId(uuid, "") != null);

    return uuid;
  }

  /** Generates a track id that collides with no known local or remote track. */
  @Override
  public String getNextTrackUUID() {
    String uuid;

    do {
      uuid = UUID.randomUUID().toString();
    } while (getTrackForId(uuid) != null);

    return uuid;
  }

  @Override
  public PeerConnectionFactory getPeerConnectionFactory() {
    return mFactory;
  }

  @Nullable
  @Override
  public Activity getActivity() {
    return activity;
  }

  /**
   * Finds a stream by id: local streams first, then the remote streams of the
   * given peer connection (or of every peer connection when peerConnectionId is empty).
   *
   * NOTE(review): when peerConnectionId is non-empty but unknown, pco is null and
   * pco.remoteStreams dereferences it — confirm callers always pass a valid id.
   */
  MediaStream getStreamForId(String id, String peerConnectionId) {
    MediaStream stream = localStreams.get(id);

    if (stream == null) {
      if (peerConnectionId.length() > 0) {
        PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
        stream = pco.remoteStreams.get(id);
      } else {
        for (Entry entry : mPeerConnectionObservers
            .entrySet()) {
          PeerConnectionObserver pco = entry.getValue();
          stream = pco.remoteStreams.get(id);
          if (stream != null) {
            break;
          }
        }
      }
    }

    return stream;
  }

  /** Finds a track by id: local tracks first, then every peer connection's remote tracks. */
  private MediaStreamTrack getTrackForId(String trackId) {
    MediaStreamTrack track = localTracks.get(trackId);

    if (track == null) {
      for (Entry entry : mPeerConnectionObservers.entrySet()) {
        PeerConnectionObserver pco = entry.getValue();
        track = pco.remoteTracks.get(trackId);
        if (track != null) {
          break;
        }
      }
    }

    return track;
  }


  /**
   * getUserMedia entry point: allocates an empty local MediaStream and hands the
   * actual capture work (camera/mic, permissions) to {@link GetUserMediaImpl},
   * which also completes {@code result}.
   */
  public void getUserMedia(ConstraintsMap constraints, Result result) {
    String streamId = getNextStreamUUID();
    MediaStream mediaStream = mFactory.createLocalMediaStream(streamId);

    if (mediaStream == null) {
      // XXX The following does not follow the getUserMedia() algorithm
      // specified by
      // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
      // with respect to distinguishing the various causes of failure.
      result.error(
          /* type */ "getUserMediaFailed",
          "Failed to create new media stream", null);
      return;
    }

    getUserMediaImpl.getUserMedia(constraints, result, mediaStream);
  }

  /**
   * getDisplayMedia entry point (screen capture); mirrors {@link #getUserMedia}
   * but delegates to the display-capture path of {@link GetUserMediaImpl}.
   */
  public void getDisplayMedia(ConstraintsMap constraints, Result result) {
    String streamId = getNextStreamUUID();
    MediaStream mediaStream = mFactory.createLocalMediaStream(streamId);

    if (mediaStream == null) {
      // XXX The following does not follow the getUserMedia() algorithm
      // specified by
      // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia
      // with respect to distinguishing the various causes of failure.
+ result.error( + /* type */ "getDisplayMedia", + "Failed to create new media stream", null); + return; + } + + getUserMediaImpl.getDisplayMedia(constraints, result, mediaStream); + } + + public void getSources(Result result) { + ConstraintsArray array = new ConstraintsArray(); + String[] names = new String[Camera.getNumberOfCameras()]; + + for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { + ConstraintsMap info = getCameraInfo(i); + if (info != null) { + array.pushMap(info); + } + } + + ConstraintsMap audio = new ConstraintsMap(); + audio.putString("label", "Audio"); + audio.putString("deviceId", "audio-1"); + audio.putString("facing", ""); + audio.putString("kind", "audioinput"); + array.pushMap(audio); + result.success(array); + } + + private void createLocalMediaStream(Result result) { + String streamId = getNextStreamUUID(); + MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); + localStreams.put(streamId, mediaStream); + + if (mediaStream == null) { + result.error(/* type */ "createLocalMediaStream", "Failed to create new media stream", null); + return; + } + Map resultMap = new HashMap<>(); + resultMap.put("streamId", mediaStream.getId()); + result.success(resultMap); + } + + public void mediaStreamTrackStop(final String id) { + // Is this functionality equivalent to `mediaStreamTrackRelease()` ? + // if so, we should merge this two and remove track from stream as well. + MediaStreamTrack track = localTracks.get(id); + if (track == null) { + Log.d(TAG, "mediaStreamTrackStop() track is null"); + return; + } + track.setEnabled(false); + if (track.kind().equals("video")) { + getUserMediaImpl.removeVideoCapturer(id); + } + localTracks.remove(id); + // What exactly does `detached` mean in doc? 
+ // see: https://www.w3.org/TR/mediacapture-streams/#track-detached + } + + public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { + MediaStreamTrack track = localTracks.get(id); + if (track == null) { + Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); + return; + } else if (track.enabled() == enabled) { + return; + } + track.setEnabled(enabled); + } + + public void mediaStreamTrackSetVolume(final String id, final double volume) { + MediaStreamTrack track = localTracks.get(id); + if (track != null && track instanceof AudioTrack) { + Log.d(TAG, "setVolume(): " + id + "," + volume); + try { + ((AudioTrack) track).setVolume(volume); + } catch (Exception e) { + Log.e(TAG, "setVolume(): error", e); + } + } else { + Log.w(TAG, "setVolume(): track not found: " + id); + } + } + + public void mediaStreamAddTrack(final String streaemId, final String trackId, Result result) { + MediaStream mediaStream = localStreams.get(streaemId); + if (mediaStream != null) { + MediaStreamTrack track = localTracks.get(trackId); + if (track != null) { + if (track.kind().equals("audio")) { + mediaStream.addTrack((AudioTrack) track); + } else if (track.kind().equals("video")) { + mediaStream.addTrack((VideoTrack) track); + } + } else { + String errorMsg = "mediaStreamAddTrack() track [" + trackId + "] is null"; + Log.d(TAG, errorMsg); + result.error("mediaStreamAddTrack", errorMsg, null); + } + } else { + String errorMsg = "mediaStreamAddTrack() stream [" + trackId + "] is null"; + Log.d(TAG, errorMsg); + result.error("mediaStreamAddTrack", errorMsg, null); + } + result.success(null); + } + + public void mediaStreamRemoveTrack(final String streaemId, final String trackId, Result result) { + MediaStream mediaStream = localStreams.get(streaemId); + if (mediaStream != null) { + MediaStreamTrack track = localTracks.get(trackId); + if (track != null) { + if (track.kind().equals("audio")) { + mediaStream.removeTrack((AudioTrack) track); + } else if 
(track.kind().equals("video")) { + mediaStream.removeTrack((VideoTrack) track); + } + } else { + String errorMsg = "mediaStreamRemoveTrack() track [" + trackId + "] is null"; + Log.d(TAG, errorMsg); + result.error("mediaStreamRemoveTrack", errorMsg, null); + } + } else { + String errorMsg = "mediaStreamRemoveTrack() stream [" + trackId + "] is null"; + Log.d(TAG, errorMsg); + result.error("mediaStreamRemoveTrack", errorMsg, null); + } + result.success(null); + } + + public void mediaStreamTrackRelease(final String streamId, final String _trackId) { + MediaStream stream = localStreams.get(streamId); + if (stream == null) { + Log.d(TAG, "mediaStreamTrackRelease() stream is null"); + return; + } + MediaStreamTrack track = localTracks.get(_trackId); + if (track == null) { + Log.d(TAG, "mediaStreamTrackRelease() track is null"); + return; + } + track.setEnabled(false); // should we do this? + localTracks.remove(_trackId); + if (track.kind().equals("audio")) { + stream.removeTrack((AudioTrack) track); + } else if (track.kind().equals("video")) { + stream.removeTrack((VideoTrack) track); + getUserMediaImpl.removeVideoCapturer(_trackId); + } + } + + public ConstraintsMap getCameraInfo(int index) { + CameraInfo info = new CameraInfo(); + + try { + Camera.getCameraInfo(index, info); + } catch (Exception e) { + Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e); + return null; + } + ConstraintsMap params = new ConstraintsMap(); + String facing = info.facing == 1 ? 
"front" : "back"; + params.putString("label", + "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); + params.putString("deviceId", "" + index); + params.putString("facing", facing); + params.putString("kind", "videoinput"); + return params; + } + + private MediaConstraints defaultConstraints() { + MediaConstraints constraints = new MediaConstraints(); + // TODO video media + constraints.mandatory.add(new KeyValuePair("OfferToReceiveAudio", "true")); + constraints.mandatory.add(new KeyValuePair("OfferToReceiveVideo", "true")); + constraints.optional.add(new KeyValuePair("DtlsSrtpKeyAgreement", "true")); + return constraints; + } + + public void peerConnectionSetConfiguration(ConstraintsMap configuration, + PeerConnection peerConnection) { + if (peerConnection == null) { + Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null"); + return; + } + peerConnection.setConfiguration(parseRTCConfiguration(configuration)); + } + + public void peerConnectionAddStream(final String streamId, final String id, Result result) { + MediaStream mediaStream = localStreams.get(streamId); + if (mediaStream == null) { + Log.d(TAG, "peerConnectionAddStream() mediaStream is null"); + return; + } + PeerConnection peerConnection = getPeerConnection(id); + if (peerConnection != null) { + boolean res = peerConnection.addStream(mediaStream); + Log.d(TAG, "addStream" + result); + result.success(res); + } else { + Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); + result.error("peerConnectionAddStreamFailed", + "peerConnectionAddStream() peerConnection is null", null); + } + } + + public void peerConnectionRemoveStream(final String streamId, final String id, Result result) { + MediaStream mediaStream = localStreams.get(streamId); + if (mediaStream == null) { + Log.d(TAG, "peerConnectionRemoveStream() mediaStream is null"); + return; + } + PeerConnection peerConnection = getPeerConnection(id); + if (peerConnection != null) { + 
peerConnection.removeStream(mediaStream); + result.success(null); + } else { + Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); + result.error("peerConnectionRemoveStreamFailed", + "peerConnectionAddStream() peerConnection is null", null); + } + } + + public void peerConnectionCreateOffer( + String id, + ConstraintsMap constraints, + final Result result) { + PeerConnection peerConnection = getPeerConnection(id); + + if (peerConnection != null) { + peerConnection.createOffer(new SdpObserver() { + @Override + public void onCreateFailure(String s) { + result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); + } + + @Override + public void onCreateSuccess(final SessionDescription sdp) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } + + @Override + public void onSetFailure(String s) { + } + + @Override + public void onSetSuccess() { + } + }, parseMediaConstraints(constraints)); + } else { + Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); + result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); + } + } + + public void peerConnectionCreateAnswer( + String id, + ConstraintsMap constraints, + final Result result) { + PeerConnection peerConnection = getPeerConnection(id); + + if (peerConnection != null) { + peerConnection.createAnswer(new SdpObserver() { + @Override + public void onCreateFailure(String s) { + result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); + } + + @Override + public void onCreateSuccess(final SessionDescription sdp) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + result.success(params.toMap()); + } + + @Override + public void onSetFailure(String s) { + } + + @Override + public void onSetSuccess() { + } + }, parseMediaConstraints(constraints)); + } else { + Log.d(TAG, 
"peerConnectionCreateAnswer() peerConnection is null"); + result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); + } + } + + public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, + final Result result) { + PeerConnection peerConnection = getPeerConnection(id); + + Log.d(TAG, "peerConnectionSetLocalDescription() start"); + if (peerConnection != null) { + SessionDescription sdp = new SessionDescription( + Type.fromCanonicalForm(sdpMap.getString("type")), + sdpMap.getString("sdp") + ); + + peerConnection.setLocalDescription(new SdpObserver() { + @Override + public void onCreateSuccess(final SessionDescription sdp) { + } + + @Override + public void onSetSuccess() { + result.success(null); + } + + @Override + public void onCreateFailure(String s) { + } + + @Override + public void onSetFailure(String s) { + result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); + } + }, sdp); + } else { + Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); + result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); + } + Log.d(TAG, "peerConnectionSetLocalDescription() end"); + } + + public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, + final Result result) { + PeerConnection peerConnection = getPeerConnection(id); + // final String d = sdpMap.getString("type"); + + Log.d(TAG, "peerConnectionSetRemoteDescription() start"); + if (peerConnection != null) { + SessionDescription sdp = new SessionDescription( + Type.fromCanonicalForm(sdpMap.getString("type")), + sdpMap.getString("sdp") + ); + + peerConnection.setRemoteDescription(new SdpObserver() { + @Override + public void onCreateSuccess(final SessionDescription sdp) { + } + + @Override + public void onSetSuccess() { + result.success(null); + } + + @Override + public void onCreateFailure(String s) { + } + + @Override + public void onSetFailure(String s) { + 
result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); + } + }, sdp); + } else { + Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); + result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); + } + Log.d(TAG, "peerConnectionSetRemoteDescription() end"); + } + + public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, + final Result result) { + boolean res = false; + PeerConnection peerConnection = getPeerConnection(id); + Log.d(TAG, "peerConnectionAddICECandidate() start"); + if (peerConnection != null) { + IceCandidate candidate = new IceCandidate( + candidateMap.getString("sdpMid"), + candidateMap.getInt("sdpMLineIndex"), + candidateMap.getString("candidate") + ); + res = peerConnection.addIceCandidate(candidate); + } else { + Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); + result.error("peerConnectionAddICECandidateFailed", + "peerConnectionAddICECandidate() peerConnection is null", null); + } + result.success(res); + Log.d(TAG, "peerConnectionAddICECandidate() end"); + } + + public void peerConnectionGetStats(String trackId, String id, final Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); + } else { + pco.getStats(trackId, result); + } + } + + public void peerConnectionClose(final String id) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "peerConnectionClose() peerConnection is null"); + } else { + pco.close(); + } + } + + public void peerConnectionDispose(final String id) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "peerConnectionDispose() peerConnection is null"); + } else { + pco.dispose(); + 
mPeerConnectionObservers.remove(id); + } + if (mPeerConnectionObservers.size() == 0) { + audioManager.onAudioManagerRequested(false); + } + } + + public void mediaStreamRelease(final String id) { + MediaStream mediaStream = localStreams.get(id); + if (mediaStream != null) { + for (VideoTrack track : mediaStream.videoTracks) { + localTracks.remove(track.id()); + getUserMediaImpl.removeVideoCapturer(track.id()); + } + for (AudioTrack track : mediaStream.audioTracks) { + localTracks.remove(track.id()); + } + localStreams.remove(id); + } else { + Log.d(TAG, "mediaStreamRelease() mediaStream is null"); + } + } + + public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config, + Result result) { + // Forward to PeerConnectionObserver which deals with DataChannels + // because DataChannel is owned by PeerConnection. + PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "createDataChannel() peerConnection is null"); + } else { + pco.createDataChannel(label, config, result); + } + } + + public void dataChannelSend(String peerConnectionId, int dataChannelId, ByteBuffer bytebuffer, + Boolean isBinary) { + // Forward to PeerConnectionObserver which deals with DataChannels + // because DataChannel is owned by PeerConnection. + PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "dataChannelSend() peerConnection is null"); + } else { + pco.dataChannelSend(dataChannelId, bytebuffer, isBinary); + } + } + + public void dataChannelClose(String peerConnectionId, int dataChannelId) { + // Forward to PeerConnectionObserver which deals with DataChannels + // because DataChannel is owned by PeerConnection. 
+ PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "dataChannelClose() peerConnection is null"); + } else { + pco.dataChannelClose(dataChannelId); + } + } + + public void setActivity(Activity activity) { + this.activity = activity; + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 9c2d141b61..56db8a7509 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -1,20 +1,18 @@ package com.cloudwebrtc.webrtc; -import java.io.UnsupportedEncodingException; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import android.util.Base64; import android.util.Log; import android.util.SparseArray; import androidx.annotation.Nullable; - import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; - +import io.flutter.plugin.common.BinaryMessenger; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodChannel.Result; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; import org.webrtc.AudioTrack; import org.webrtc.DataChannel; import org.webrtc.IceCandidate; @@ -26,453 +24,447 @@ import org.webrtc.StatsReport; import org.webrtc.VideoTrack; -import io.flutter.plugin.common.EventChannel; -import io.flutter.plugin.common.MethodChannel.Result; - class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.StreamHandler { - private final static String TAG = FlutterWebRTCPlugin.TAG; - - private final SparseArray dataChannels - = new SparseArray(); - private final String id; - private PeerConnection 
peerConnection; - final Map remoteStreams; - final Map remoteTracks; - private final FlutterWebRTCPlugin plugin; - - EventChannel eventChannel; - EventChannel.EventSink eventSink; - - PeerConnectionObserver(FlutterWebRTCPlugin plugin, String id) { - this.plugin = plugin; - this.id = id; - this.remoteStreams = new HashMap(); - this.remoteTracks = new HashMap(); - - - this.eventChannel = - new EventChannel( - plugin.registrar().messenger(), - "FlutterWebRTC/peerConnectoinEvent" + id); - eventChannel.setStreamHandler(this); - this.eventSink = null; - } - - @Override - public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = new AnyThreadSink(sink); - } - @Override - public void onCancel(Object o) { - eventSink = null; + private final static String TAG = FlutterWebRTCPlugin.TAG; + + private final SparseArray dataChannels = new SparseArray<>(); + private BinaryMessenger messenger; + private final String id; + private PeerConnection peerConnection; + final Map remoteStreams = new HashMap<>(); + final Map remoteTracks = new HashMap<>(); + private final StateProvider stateProvider; + + private final EventChannel eventChannel; + private EventChannel.EventSink eventSink; + + PeerConnectionObserver(StateProvider stateProvider, BinaryMessenger messenger, String id) { + this.stateProvider = stateProvider; + this.messenger = messenger; + this.id = id; + + eventChannel = new EventChannel(messenger, "FlutterWebRTC/peerConnectoinEvent" + id); + eventChannel.setStreamHandler(this); + } + + @Override + public void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + } + + @Override + public void onCancel(Object o) { + eventSink = null; + } + + PeerConnection getPeerConnection() { + return peerConnection; + } + + void setPeerConnection(PeerConnection peerConnection) { + this.peerConnection = peerConnection; + } + + void close() { + peerConnection.close(); + remoteStreams.clear(); + remoteTracks.clear(); + dataChannels.clear(); + 
} + + void dispose() { + this.close(); + peerConnection.dispose(); + eventChannel.setStreamHandler(null); + } + + void createDataChannel(String label, ConstraintsMap config, Result result) { + DataChannel.Init init = new DataChannel.Init(); + if (config != null) { + if (config.hasKey("id")) { + init.id = config.getInt("id"); + } + if (config.hasKey("ordered")) { + init.ordered = config.getBoolean("ordered"); + } + if (config.hasKey("maxRetransmitTime")) { + init.maxRetransmitTimeMs = config.getInt("maxRetransmitTime"); + } + if (config.hasKey("maxRetransmits")) { + init.maxRetransmits = config.getInt("maxRetransmits"); + } + if (config.hasKey("protocol")) { + init.protocol = config.getString("protocol"); + } + if (config.hasKey("negotiated")) { + init.negotiated = config.getBoolean("negotiated"); + } } - - PeerConnection getPeerConnection() { - return peerConnection; + DataChannel dataChannel = peerConnection.createDataChannel(label, init); + // XXX RTP data channels are not defined by the WebRTC standard, have + // been deprecated in Chromium, and Google have decided (in 2015) to no + // longer support them (in the face of multiple reported issues of + // breakages). 
+ int dataChannelId = init.id; + if (dataChannel != null && -1 != dataChannelId) { + dataChannels.put(dataChannelId, dataChannel); + registerDataChannelObserver(dataChannelId, dataChannel); + + ConstraintsMap params = new ConstraintsMap(); + params.putInt("id", dataChannel.id()); + params.putString("label", dataChannel.label()); + result.success(params.toMap()); + } else { + result.error("createDataChannel", + "Can't create data-channel for id: " + dataChannelId, + null); } - - void setPeerConnection(PeerConnection peerConnection) { - this.peerConnection = peerConnection; + } + + void dataChannelClose(int dataChannelId) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + dataChannel.close(); + dataChannels.remove(dataChannelId); + } else { + Log.d(TAG, "dataChannelClose() dataChannel is null"); } - - void close() { - peerConnection.close(); - remoteStreams.clear(); - remoteTracks.clear(); - dataChannels.clear(); + } + + void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); + dataChannel.send(buffer); + } else { + Log.d(TAG, "dataChannelSend() dataChannel is null"); } + } + + void getStats(String trackId, final Result result) { + MediaStreamTrack track = null; + if (trackId == null + || trackId.isEmpty() + || (track = stateProvider.getLocalTracks().get(trackId)) != null + || (track = remoteTracks.get(trackId)) != null) { + peerConnection.getStats( + new StatsObserver() { + @Override + public void onComplete(StatsReport[] reports) { + + final int reportCount = reports.length; + ConstraintsMap params = new ConstraintsMap(); + ConstraintsArray stats = new ConstraintsArray(); + + for (int i = 0; i < reportCount; ++i) { + StatsReport report = reports[i]; + ConstraintsMap report_map = new ConstraintsMap(); + + report_map.putString("id", 
report.id); + report_map.putString("type", report.type); + report_map.putDouble("timestamp", report.timestamp); + + StatsReport.Value[] values = report.values; + ConstraintsMap v_map = new ConstraintsMap(); + final int valueCount = values.length; + for (int j = 0; j < valueCount; ++j) { + StatsReport.Value v = values[j]; + v_map.putString(v.name, v.value); + } - void dispose() { - this.close(); - peerConnection.dispose(); - eventChannel.setStreamHandler(null); - } + report_map.putMap("values", v_map.toMap()); + stats.pushMap(report_map); + } - void createDataChannel(String label, ConstraintsMap config, Result result) { - DataChannel.Init init = new DataChannel.Init(); - if (config != null) { - if (config.hasKey("id")) { - init.id = config.getInt("id"); - } - if (config.hasKey("ordered")) { - init.ordered = config.getBoolean("ordered"); - } - if (config.hasKey("maxRetransmitTime")) { - init.maxRetransmitTimeMs = config.getInt("maxRetransmitTime"); - } - if (config.hasKey("maxRetransmits")) { - init.maxRetransmits = config.getInt("maxRetransmits"); - } - if (config.hasKey("protocol")) { - init.protocol = config.getString("protocol"); - } - if (config.hasKey("negotiated")) { - init.negotiated = config.getBoolean("negotiated"); + params.putArray("stats", stats.toArrayList()); + result.success(params.toMap()); } - } - DataChannel dataChannel = peerConnection.createDataChannel(label, init); - // XXX RTP data channels are not defined by the WebRTC standard, have - // been deprecated in Chromium, and Google have decided (in 2015) to no - // longer support them (in the face of multiple reported issues of - // breakages). 
- int dataChannelId = init.id; - if (dataChannel != null && -1 != dataChannelId) { - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); - - ConstraintsMap params = new ConstraintsMap(); - params.putInt("id", dataChannel.id()); - params.putString("label", dataChannel.label()); - result.success(params.toMap()); - }else{ - result.error("createDataChannel", - "Can't create data-channel for id: " + dataChannelId, - null); - } + }, + track); + } else { + Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); + result.error("peerConnectionGetStats", + "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, + null); } - - void dataChannelClose(int dataChannelId) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - dataChannel.close(); - dataChannels.remove(dataChannelId); - } else { - Log.d(TAG, "dataChannelClose() dataChannel is null"); - } + } + + @Override + public void onIceCandidate(final IceCandidate candidate) { + Log.d(TAG, "onIceCandidate"); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onCandidate"); + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); + candidateParams.putString("sdpMid", candidate.sdpMid); + candidateParams.putString("candidate", candidate.sdp); + params.putMap("candidate", candidateParams.toMap()); + sendEvent(params); + } + + @Override + public void onIceCandidatesRemoved(final IceCandidate[] candidates) { + Log.d(TAG, "onIceCandidatesRemoved"); + } + + @Override + public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "iceConnectionState"); + params.putString("state", iceConnectionStateString(iceConnectionState)); + sendEvent(params); + } + + @Override + public void 
onIceConnectionReceivingChange(boolean var1) { + } + + @Override + public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { + Log.d(TAG, "onIceGatheringChange" + iceGatheringState.name()); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "iceGatheringState"); + params.putString("state", iceGatheringStateString(iceGatheringState)); + sendEvent(params); + } + + private String getUIDForStream(MediaStream mediaStream) { + for (Iterator> i + = remoteStreams.entrySet().iterator(); + i.hasNext(); ) { + Map.Entry e = i.next(); + if (e.getValue().equals(mediaStream)) { + return e.getKey(); + } } - - void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); - dataChannel.send(buffer); - } else { - Log.d(TAG, "dataChannelSend() dataChannel is null"); + return null; + } + + @Override + public void onAddStream(MediaStream mediaStream) { + String streamUID = null; + String streamId = mediaStream.getId(); + // The native WebRTC implementation has a special concept of a default + // MediaStream instance with the label default that the implementation + // reuses. 
+ if ("default".equals(streamId)) { + for (Map.Entry e + : remoteStreams.entrySet()) { + if (e.getValue().equals(mediaStream)) { + streamUID = e.getKey(); + break; } + } } - void getStats(String trackId, final Result result) { - MediaStreamTrack track = null; - if (trackId == null - || trackId.isEmpty() - || (track = plugin.localTracks.get(trackId)) != null - || (track = remoteTracks.get(trackId)) != null) { - peerConnection.getStats( - new StatsObserver() { - @Override - public void onComplete(StatsReport[] reports) { - - final int reportCount = reports.length; - ConstraintsMap params = new ConstraintsMap(); - ConstraintsArray stats = new ConstraintsArray(); - - for (int i = 0; i < reportCount; ++i) { - StatsReport report = reports[i]; - ConstraintsMap report_map = new ConstraintsMap(); - - report_map.putString("id", report.id); - report_map.putString("type", report.type); - report_map.putDouble("timestamp", report.timestamp); - - StatsReport.Value[] values = report.values; - ConstraintsMap v_map = new ConstraintsMap(); - final int valueCount = values.length; - for (int j = 0; j < valueCount; ++j) { - StatsReport.Value v = values[j]; - v_map.putString(v.name, v.value); - } - - report_map.putMap("values", v_map.toMap()); - stats.pushMap(report_map); - } - - params.putArray("stats", stats.toArrayList()); - result.success(params.toMap()); - } - }, - track); - } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - result.error("peerConnectionGetStats", - "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, - null); - } + if (streamUID == null) { + streamUID = stateProvider.getNextStreamUUID(); + remoteStreams.put(streamId, mediaStream); } - @Override - public void onIceCandidate(final IceCandidate candidate) { - Log.d(TAG, "onIceCandidate"); - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onCandidate"); - ConstraintsMap candidateParams = new ConstraintsMap(); - 
candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); - candidateParams.putString("sdpMid", candidate.sdpMid); - candidateParams.putString("candidate", candidate.sdp); - params.putMap("candidate", candidateParams.toMap()); - sendEvent(params); - } + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onAddStream"); + params.putString("streamId", streamId); - @Override - public void onIceCandidatesRemoved(final IceCandidate[] candidates) { - Log.d(TAG, "onIceCandidatesRemoved"); - } + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); - @Override - public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "iceConnectionState"); - params.putString("state", iceConnectionStateString(iceConnectionState)); - sendEvent(params); - } + for (int i = 0; i < mediaStream.videoTracks.size(); i++) { + VideoTrack track = mediaStream.videoTracks.get(i); + String trackId = track.id(); - @Override - public void onIceConnectionReceivingChange(boolean var1) { - } + remoteTracks.put(trackId, track); - @Override - public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { - Log.d(TAG, "onIceGatheringChange" + iceGatheringState.name()); - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "iceGatheringState"); - params.putString("state", iceGatheringStateString(iceGatheringState)); - sendEvent(params); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", "Video"); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + videoTracks.pushMap(trackInfo); } - - private String getUIDForStream(MediaStream mediaStream) { - for (Iterator> 
i - = remoteStreams.entrySet().iterator(); - i.hasNext();) { - Map.Entry e = i.next(); - if (e.getValue().equals(mediaStream)) { - return e.getKey(); - } - } - return null; + for (int i = 0; i < mediaStream.audioTracks.size(); i++) { + AudioTrack track = mediaStream.audioTracks.get(i); + String trackId = track.id(); + + remoteTracks.put(trackId, track); + + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", "Audio"); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + audioTracks.pushMap(trackInfo); } + params.putArray("audioTracks", audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); - @Override - public void onAddStream(MediaStream mediaStream) { - String streamUID = null; - String streamId = mediaStream.getId(); - // The native WebRTC implementation has a special concept of a default - // MediaStream instance with the label default that the implementation - // reuses. 
- if ("default".equals(streamId)) { - for (Map.Entry e - : remoteStreams.entrySet()) { - if (e.getValue().equals(mediaStream)) { - streamUID = e.getKey(); - break; - } - } - } - - if (streamUID == null){ - streamUID = plugin.getNextStreamUUID(); - remoteStreams.put(streamId, mediaStream); - } - - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddStream"); - params.putString("streamId", streamId); + sendEvent(params); + } - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - for (int i = 0; i < mediaStream.videoTracks.size(); i++) { - VideoTrack track = mediaStream.videoTracks.get(i); - String trackId = track.id(); - - remoteTracks.put(trackId, track); - - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Video"); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - videoTracks.pushMap(trackInfo); - } - for (int i = 0; i < mediaStream.audioTracks.size(); i++) { - AudioTrack track = mediaStream.audioTracks.get(i); - String trackId = track.id(); - - remoteTracks.put(trackId, track); - - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Audio"); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - audioTracks.pushMap(trackInfo); - } - params.putArray("audioTracks", audioTracks.toArrayList()); - params.putArray("videoTracks", videoTracks.toArrayList()); - - sendEvent(params); + void sendEvent(ConstraintsMap event) { + if (eventSink != null) { + eventSink.success(event.toMap()); } + } + @Override + public void onRemoveStream(MediaStream mediaStream) { - 
void sendEvent(ConstraintsMap event) { - if(eventSink != null ) - eventSink.success(event.toMap()); - } - - @Override - public void onRemoveStream(MediaStream mediaStream) { + String streamId = mediaStream.getId(); - String streamId = mediaStream.getId(); - - for (VideoTrack track : mediaStream.videoTracks) { - this.remoteTracks.remove(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - this.remoteTracks.remove(track.id()); - } - - this.remoteStreams.remove(streamId); - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onRemoveStream"); - params.putString("streamId", streamId); - sendEvent(params); - } - - @Override - public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams){ - Log.d(TAG, "onAddTrack"); - for (MediaStream stream : mediaStreams) { - String streamId = stream.getId(); - MediaStreamTrack track = receiver.track(); - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddTrack"); - params.putString("streamId", streamId); - params.putString("trackId", track.id()); - - String trackId = track.id(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", track.kind()); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - params.putMap("track", trackInfo.toMap()); - sendEvent(params); - } + for (VideoTrack track : mediaStream.videoTracks) { + this.remoteTracks.remove(track.id()); } - @Override - public void onDataChannel(DataChannel dataChannel) { - // XXX Unfortunately, the Java WebRTC API doesn't expose the id - // of the underlying C++/native DataChannel (even though the - // WebRTC standard defines the DataChannel.id property). 
As a - // workaround, generated an id which will surely not clash with - // the ids of the remotely-opened (and standard-compliant - // locally-opened) DataChannels. - int dataChannelId = -1; - // The RTCDataChannel.id space is limited to unsigned short by - // the standard: - // https://www.w3.org/TR/webrtc/#dom-datachannel-id. - // Additionally, 65535 is reserved due to SCTP INIT and - // INIT-ACK chunks only allowing a maximum of 65535 streams to - // be negotiated (as defined by the WebRTC Data Channel - // Establishment Protocol). - for (int i = 65536; i <= Integer.MAX_VALUE; ++i) { - if (null == dataChannels.get(i, null)) { - dataChannelId = i; - break; - } - } - if (-1 == dataChannelId) { - return; - } - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "didOpenDataChannel"); - params.putInt("id", dataChannelId); - params.putString("label", dataChannel.label()); - - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); - - sendEvent(params); + for (AudioTrack track : mediaStream.audioTracks) { + this.remoteTracks.remove(track.id()); } - private void registerDataChannelObserver(int dcId, DataChannel dataChannel) { - // DataChannel.registerObserver implementation does not allow to - // unregister, so the observer is registered here and is never - // unregistered - dataChannel.registerObserver( - new DataChannelObserver(plugin, id, dcId, dataChannel)); + this.remoteStreams.remove(streamId); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRemoveStream"); + params.putString("streamId", streamId); + sendEvent(params); + } + + @Override + public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { + Log.d(TAG, "onAddTrack"); + for (MediaStream stream : mediaStreams) { + String streamId = stream.getId(); + MediaStreamTrack track = receiver.track(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onAddTrack"); + 
params.putString("streamId", streamId); + params.putString("trackId", track.id()); + + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); } - - @Override - public void onRenegotiationNeeded() { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onRenegotiationNeeded"); - sendEvent(params); + } + + @Override + public void onDataChannel(DataChannel dataChannel) { + // XXX Unfortunately, the Java WebRTC API doesn't expose the id + // of the underlying C++/native DataChannel (even though the + // WebRTC standard defines the DataChannel.id property). As a + // workaround, generated an id which will surely not clash with + // the ids of the remotely-opened (and standard-compliant + // locally-opened) DataChannels. + int dataChannelId = -1; + // The RTCDataChannel.id space is limited to unsigned short by + // the standard: + // https://www.w3.org/TR/webrtc/#dom-datachannel-id. + // Additionally, 65535 is reserved due to SCTP INIT and + // INIT-ACK chunks only allowing a maximum of 65535 streams to + // be negotiated (as defined by the WebRTC Data Channel + // Establishment Protocol). 
+ for (int i = 65536; i <= Integer.MAX_VALUE; ++i) { + if (null == dataChannels.get(i, null)) { + dataChannelId = i; + break; + } } - - @Override - public void onSignalingChange(PeerConnection.SignalingState signalingState) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "signalingState"); - params.putString("state", signalingStateString(signalingState)); - sendEvent(params); + if (-1 == dataChannelId) { + return; } - - @Nullable - private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { - switch (iceConnectionState) { - case NEW: - return "new"; - case CHECKING: - return "checking"; - case CONNECTED: - return "connected"; - case COMPLETED: - return "completed"; - case FAILED: - return "failed"; - case DISCONNECTED: - return "disconnected"; - case CLOSED: - return "closed"; - } - return null; + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "didOpenDataChannel"); + params.putInt("id", dataChannelId); + params.putString("label", dataChannel.label()); + + dataChannels.put(dataChannelId, dataChannel); + registerDataChannelObserver(dataChannelId, dataChannel); + + sendEvent(params); + } + + private void registerDataChannelObserver(int dcId, DataChannel dataChannel) { + // DataChannel.registerObserver implementation does not allow to + // unregister, so the observer is registered here and is never + // unregistered + dataChannel.registerObserver( + new DataChannelObserver(messenger, id, dcId, dataChannel)); + } + + @Override + public void onRenegotiationNeeded() { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onRenegotiationNeeded"); + sendEvent(params); + } + + @Override + public void onSignalingChange(PeerConnection.SignalingState signalingState) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "signalingState"); + params.putString("state", signalingStateString(signalingState)); + sendEvent(params); + } + + 
@Nullable + private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { + switch (iceConnectionState) { + case NEW: + return "new"; + case CHECKING: + return "checking"; + case CONNECTED: + return "connected"; + case COMPLETED: + return "completed"; + case FAILED: + return "failed"; + case DISCONNECTED: + return "disconnected"; + case CLOSED: + return "closed"; } - - @Nullable - private String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { - switch (iceGatheringState) { - case NEW: - return "new"; - case GATHERING: - return "gathering"; - case COMPLETE: - return "complete"; - } - return null; + return null; + } + + @Nullable + private String iceGatheringStateString(PeerConnection.IceGatheringState iceGatheringState) { + switch (iceGatheringState) { + case NEW: + return "new"; + case GATHERING: + return "gathering"; + case COMPLETE: + return "complete"; } - - @Nullable - private String signalingStateString(PeerConnection.SignalingState signalingState) { - switch (signalingState) { - case STABLE: - return "stable"; - case HAVE_LOCAL_OFFER: - return "have-local-offer"; - case HAVE_LOCAL_PRANSWER: - return "have-local-pranswer"; - case HAVE_REMOTE_OFFER: - return "have-remote-offer"; - case HAVE_REMOTE_PRANSWER: - return "have-remote-pranswer"; - case CLOSED: - return "closed"; - } - return null; + return null; + } + + @Nullable + private String signalingStateString(PeerConnection.SignalingState signalingState) { + switch (signalingState) { + case STABLE: + return "stable"; + case HAVE_LOCAL_OFFER: + return "have-local-offer"; + case HAVE_LOCAL_PRANSWER: + return "have-local-pranswer"; + case HAVE_REMOTE_OFFER: + return "have-remote-offer"; + case HAVE_REMOTE_PRANSWER: + return "have-remote-pranswer"; + case CLOSED: + return "closed"; } + return null; + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java new 
file mode 100644 index 0000000000..6c0c9f3a5b --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java @@ -0,0 +1,29 @@ +package com.cloudwebrtc.webrtc; + +import android.app.Activity; +import androidx.annotation.Nullable; +import java.util.Map; +import org.webrtc.MediaStream; +import org.webrtc.MediaStreamTrack; +import org.webrtc.PeerConnectionFactory; + +/** + * Provides interested components with access to the current application state. + * + * It is encouraged to use this class instead of a component directly. + */ +public interface StateProvider { + + Map getLocalStreams(); + + Map getLocalTracks(); + + String getNextStreamUUID(); + + String getNextTrackUUID(); + + PeerConnectionFactory getPeerConnectionFactory(); + + @Nullable + Activity getActivity(); +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java new file mode 100644 index 0000000000..ce41031a8c --- /dev/null +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java @@ -0,0 +1,92 @@ +package com.cloudwebrtc.webrtc.utils; + +import android.util.Log; +import java.util.List; +import java.util.Map.Entry; +import org.webrtc.MediaConstraints; +import org.webrtc.MediaConstraints.KeyValuePair; + +public class MediaConstraintsUtils { + + static public final String TAG = "MediaConstraintsUtils"; + + /** + * Parses mandatory and optional "GUM" constraints described by a specific + * ConstraintsMap. + * + * @param constraints A ConstraintsMap which represents a JavaScript object specifying + * the constraints to be parsed into a + * MediaConstraints instance. + * @return A new MediaConstraints instance initialized with the mandatory and optional + * constraint keys and values specified by + * constraints. 
+ */ + public static MediaConstraints parseMediaConstraints(ConstraintsMap constraints) { + MediaConstraints mediaConstraints = new MediaConstraints(); + + if (constraints.hasKey("mandatory") + && constraints.getType("mandatory") == ObjectType.Map) { + parseConstraints(constraints.getMap("mandatory"), + mediaConstraints.mandatory); + } else { + Log.d(TAG, "mandatory constraints are not a map"); + } + + if (constraints.hasKey("optional") + && constraints.getType("optional") == ObjectType.Array) { + ConstraintsArray optional = constraints.getArray("optional"); + + for (int i = 0, size = optional.size(); i < size; i++) { + if (optional.getType(i) == ObjectType.Map) { + parseConstraints( + optional.getMap(i), + mediaConstraints.optional); + } + } + } else { + Log.d(TAG, "optional constraints are not an array"); + } + + return mediaConstraints; + } + + /** + * Parses a constraint set specified in the form of a JavaScript object into a specific + * List of MediaConstraints.KeyValuePairs. + * + * @param src The constraint set in the form of a JavaScript object to parse. + * @param dst The List of MediaConstraints.KeyValuePairs into which the + * specified src is to be parsed. + */ + private static void parseConstraints( + ConstraintsMap src, + List dst) { + + for (Entry entry : src.toMap().entrySet()) { + String key = entry.getKey(); + String value = getMapStrValue(src, entry.getKey()); + dst.add(new KeyValuePair(key, value)); + } + } + + private static String getMapStrValue(ConstraintsMap map, String key) { + if (!map.hasKey(key)) { + return null; + } + ObjectType type = map.getType(key); + switch (type) { + case Boolean: + return String.valueOf(map.getBoolean(key)); + case Number: + // Don't know how to distinguish between Int and Double from + // ReadableType.Number. 'getInt' will fail on double value, + // while 'getDouble' works for both. 
+ // return String.valueOf(map.getInt(key)); + return String.valueOf(map.getDouble(key)); + case String: + return map.getString(key); + default: + return null; + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java index d8c6216804..7359b741ab 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java @@ -5,13 +5,12 @@ import android.app.FragmentTransaction; import android.content.pm.PackageManager; import android.os.Build; +import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.ResultReceiver; - -import com.cloudwebrtc.webrtc.FlutterWebRTCPlugin; - +import androidx.annotation.RequiresApi; import java.util.ArrayList; /** @@ -36,7 +35,7 @@ public class PermissionUtils { private static int requestCode; private static void requestPermissions( - FlutterWebRTCPlugin plugin, + Activity activity, String[] permissions, ResultReceiver resultReceiver) { // Ask the Context whether we have already been granted the requested @@ -51,7 +50,7 @@ private static void requestPermissions( if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) grantResult = PackageManager.PERMISSION_GRANTED; else - grantResult = plugin.getContext().checkSelfPermission(permissions[i]); + grantResult = activity.checkSelfPermission(permissions[i]); grantResults[i] = grantResult; if (grantResult != PackageManager.PERMISSION_GRANTED) { @@ -73,18 +72,12 @@ private static void requestPermissions( // must still use old permissions model, regardless of the // Android version on the device. 
|| Build.VERSION.SDK_INT < Build.VERSION_CODES.M - || plugin.getActivity().getApplicationInfo().targetSdkVersion + || activity.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.M) { send(resultReceiver, requestCode, permissions, grantResults); return; } - Activity activity = plugin.getActivity(); - - if (activity == null) { - return; - } - Bundle args = new Bundle(); args.putInt(REQUEST_CODE, requestCode); args.putParcelable(RESULT_RECEIVER, resultReceiver); @@ -92,7 +85,6 @@ private static void requestPermissions( RequestPermissionsFragment fragment = new RequestPermissionsFragment(); fragment.setArguments(args); - fragment.setPlugin(plugin); FragmentTransaction transaction = activity.getFragmentManager().beginTransaction().add( @@ -107,12 +99,13 @@ private static void requestPermissions( } } + @RequiresApi(api = VERSION_CODES.M) public static void requestPermissions( - final FlutterWebRTCPlugin plugin, + final Activity activity, final String[] permissions, final Callback callback) { requestPermissions( - plugin, + activity, permissions, new ResultReceiver(new Handler(Looper.getMainLooper())) { @Override @@ -150,11 +143,7 @@ public interface Callback { * using a ResultReceiver. */ public static class RequestPermissionsFragment extends Fragment { - private FlutterWebRTCPlugin plugin; - - public void setPlugin(FlutterWebRTCPlugin plugin){ - this.plugin = plugin; - } + @RequiresApi(api = VERSION_CODES.M) private void checkSelfPermissions(boolean requestPermissions) { // Figure out which of the requested permissions are actually denied // because we do not want to ask about the granted permissions @@ -211,6 +200,7 @@ private void finish() { } } + @RequiresApi(api = VERSION_CODES.M) @Override public void onRequestPermissionsResult( int requestCode, @@ -231,7 +221,7 @@ public void onRequestPermissionsResult( // the invocation so we have to redo the permission request. 
finish(); PermissionUtils.requestPermissions( - plugin, + getActivity(), args.getStringArray(PERMISSIONS), (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); } else { From d23899f0cac82a6c21d0207bfb41ba9ddfd9966f Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sun, 7 Jun 2020 13:42:10 +0100 Subject: [PATCH 3/6] Adds additional dispose method to the MethodCallHandlerImpl --- .../main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java | 3 ++- .../java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java | 4 ++++ .../java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java | 1 + 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java index 4b7d248b28..5351e7f6d2 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java @@ -117,8 +117,9 @@ public void setSpeakerphoneOn(boolean on) { } private void stopListening() { - channel.setMethodCallHandler(null); + methodCallHandler.dispose(); methodCallHandler = null; + channel.setMethodCallHandler(null); if (rtcAudioManager != null) { Log.d(TAG, "Stopping the audio manager..."); diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 8a207fbfc1..a4e18a0344 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -115,6 +115,10 @@ interface AudioManager { this.audioManager = audioManager; } + void dispose() { + mPeerConnectionObservers.clear(); + } + private void ensureInitialized() { if (mFactory != null) { return; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java 
b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 56db8a7509..532718ac7b 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -75,6 +75,7 @@ void close() { void dispose() { this.close(); + peerConnection.dispose(); eventChannel.setStreamHandler(null); } From c64d87c094d1fb12cccdec63eebfb98be518ead9 Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sat, 13 Jun 2020 10:01:27 +0100 Subject: [PATCH 4/6] Do not request explicit permissions on pre-M for camera --- .../cloudwebrtc/webrtc/GetUserMediaImpl.java | 507 +++++++++--------- .../webrtc/utils/PermissionUtils.java | 11 +- 2 files changed, 249 insertions(+), 269 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index ce39424275..aa23346888 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -121,10 +121,11 @@ public void screenRequestPremissions(ResultReceiver resultReceiver) { ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); fragment.setArguments(args); - FragmentTransaction transaction - = activity.getFragmentManager().beginTransaction().add( - fragment, - fragment.getClass().getName()); + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName()); try { transaction.commit(); @@ -151,13 +152,13 @@ private void checkSelfPermissions(boolean requestPermissions) { public void requestStart(Activity activity, int requestCode) { if (android.os.Build.VERSION.SDK_INT < minAPILevel) { - Log.w(TAG, + Log.w( + TAG, "Can't run requestStart() due to a low API level. 
API level 21 or higher is required."); return; } else { MediaProjectionManager mediaProjectionManager = - (MediaProjectionManager) activity.getSystemService( - Context.MEDIA_PROJECTION_SERVICE); + (MediaProjectionManager) activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE); // call for the projection manager this.startActivityForResult( @@ -165,7 +166,6 @@ public void requestStart(Activity activity, int requestCode) { } } - @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); @@ -190,9 +190,7 @@ public void onActivityResult(int requestCode, int resultCode, Intent data) { private void finish() { Activity activity = getActivity(); if (activity != null) { - activity.getFragmentManager().beginTransaction() - .remove(this) - .commitAllowingStateLoss(); + activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); } } @@ -212,37 +210,33 @@ public void onResume() { * Includes default constraints set for the audio media type. * * @param audioConstraints MediaConstraints instance to be filled with the default - * constraints for audio media type. + * constraints for audio media type. 
*/ private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { audioConstraints.optional.add( new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); audioConstraints.optional.add( new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("echoCancellation", "true")); + audioConstraints.optional.add(new MediaConstraints.KeyValuePair("echoCancellation", "true")); audioConstraints.optional.add( new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); audioConstraints.optional.add( - new MediaConstraints.KeyValuePair( - "googDAEchoCancellation", "true")); + new MediaConstraints.KeyValuePair("googDAEchoCancellation", "true")); } /** * Create video capturer via given facing mode * * @param enumerator a CameraEnumerator provided by webrtc it can be Camera1Enumerator or - * Camera2Enumerator + * Camera2Enumerator * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' - * is false + * is false * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture - * null if not matched camera with specified facing mode. + * @return VideoCapturer can invoke with startCapture/stopCapture null + * if not matched camera with specified facing mode. */ private VideoCapturer createVideoCapturer( - CameraEnumerator enumerator, - boolean isFacing, - String sourceId) { + CameraEnumerator enumerator, boolean isFacing, String sourceId) { VideoCapturer videoCapturer = null; // if sourceId given, use specified sourceId first @@ -283,22 +277,17 @@ private VideoCapturer createVideoCapturer( * Retrieves "facingMode" constraint value. * * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument. - * @return String value of "facingMode" constraints in "GUM" or - * null if not specified. 
+ * @return String value of "facingMode" constraints in "GUM" or null if not specified. */ private String getFacingMode(ConstraintsMap mediaConstraints) { - return - mediaConstraints == null - ? null - : mediaConstraints.getString("facingMode"); + return mediaConstraints == null ? null : mediaConstraints.getString("facingMode"); } /** * Retrieves "sourceId" constraint value. * * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument - * @return String value of "sourceId" optional "GUM" constraint or - * null if not specified. + * @return String value of "sourceId" optional "GUM" constraint or null if not specified. */ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { if (mediaConstraints != null @@ -310,9 +299,7 @@ private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { if (optional.getType(i) == ObjectType.Map) { ConstraintsMap option = optional.getMap(i); - if (option.hasKey("sourceId") - && option.getType("sourceId") - == ObjectType.String) { + if (option.hasKey("sourceId") && option.getType("sourceId") == ObjectType.String) { return option.getString("sourceId"); } } @@ -328,9 +315,7 @@ private AudioTrack getUserAudio(ConstraintsMap constraints) { audioConstraints = new MediaConstraints(); addDefaultAudioConstraints(audioConstraints); } else { - audioConstraints - = MediaConstraintsUtils.parseMediaConstraints( - constraints.getMap("audio")); + audioConstraints = MediaConstraintsUtils.parseMediaConstraints(constraints.getMap("audio")); } Log.i(TAG, "getUserMedia(audio): " + audioConstraints); @@ -348,9 +333,7 @@ private AudioTrack getUserAudio(ConstraintsMap constraints) { * requested. 
*/ void getUserMedia( - final ConstraintsMap constraints, - final Result result, - final MediaStream mediaStream) { + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { // TODO: change getUserMedia constraints format to support new syntax // constraint format seems changed, and there is no mandatory any more. @@ -364,10 +347,8 @@ void getUserMedia( if (constraints.getType("video") == ObjectType.Map) { videoConstraintsMap = constraints.getMap("video"); if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); } } @@ -410,9 +391,7 @@ void getUserMedia( // requestedMediaTypes is the empty set, the method invocation fails // with a TypeError. if (requestPermissions.isEmpty()) { - result.error( - "TypeError", - "constraints requests no media types", null); + result.error("TypeError", "constraints requests no media types", null); return; } @@ -423,11 +402,7 @@ void getUserMedia( public void invoke(Object... args) { List grantedPermissions = (List) args[0]; - getUserMedia( - constraints, - result, - mediaStream, - grantedPermissions); + getUserMedia(constraints, result, mediaStream, grantedPermissions); } }, /* errorCallback */ new Callback() { @@ -439,130 +414,125 @@ public void invoke(Object... args) { // name attribute has the value NotAllowedError." 
result.error("DOMException", "NotAllowedError", null); } - } - ); + }); } void getDisplayMedia( - final ConstraintsMap constraints, - final Result result, - final MediaStream mediaStream) { + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { ConstraintsMap videoConstraintsMap = null; ConstraintsMap videoConstraintsMandatory = null; if (constraints.getType("video") == ObjectType.Map) { videoConstraintsMap = constraints.getMap("video"); if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); } } final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; - screenRequestPremissions(new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult( - int requestCode, - Bundle resultData) { - - /* Create ScreenCapture */ - int resultCode = resultData.getInt(GRANT_RESULTS); - Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); - - if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); - return; - } - - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; - VideoCapturer videoCapturer = null; - videoCapturer = new ScreenCapturerAndroid(mediaProjectionData, - new MediaProjection.Callback() { - @Override - public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); + screenRequestPremissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int requestCode, Bundle resultData) { + + /* Create ScreenCapture */ + int resultCode = resultData.getInt(GRANT_RESULTS); + Intent 
mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + + if (resultCode != Activity.RESULT_OK) { + result.error(null, "User didn't give permission to capture the screen.", null); + return; + } + + MediaStreamTrack[] tracks = new MediaStreamTrack[1]; + VideoCapturer videoCapturer = null; + videoCapturer = + new ScreenCapturerAndroid( + mediaProjectionData, + new MediaProjection.Callback() { + @Override + public void onStop() { + Log.e(TAG, "User revoked permission to capture the screen."); + result.error(null, "User revoked permission to capture the screen.", null); + } + }); + if (videoCapturer == null) { + result.error( + /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); + return; + } + + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(true); + + String threadName = Thread.currentThread().getName(); + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + + WindowManager wm = + (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); + + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + int fps = DEFAULT_FPS; + + videoCapturer.startCapture(width, height, fps); + Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, videoCapturer); + + tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); + + for (MediaStreamTrack track : tracks) { + if (track == null) { + continue; } - }); - if (videoCapturer == null) { - 
result.error( - /* type */ "GetDisplayMediaFailed", - "Failed to create new VideoCapturer!", null); - return; - } - - PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); - VideoSource videoSource = pcFactory.createVideoSource(true); - - String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper - .create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize(surfaceTextureHelper, applicationContext, - videoSource.getCapturerObserver()); - - WindowManager wm = (WindowManager) applicationContext - .getSystemService(Context.WINDOW_SERVICE); - - int width = wm.getDefaultDisplay().getWidth(); - int height = wm.getDefaultDisplay().getHeight(); - int fps = DEFAULT_FPS; - videoCapturer.startCapture(width, height, fps); - Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + String id = track.id(); - String trackId = stateProvider.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); - - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); - - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap successResult = new ConstraintsMap(); - - for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - stateProvider.getLocalTracks().put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); + if (track instanceof AudioTrack) { + mediaStream.addTrack((AudioTrack) track); + } else { + mediaStream.addTrack((VideoTrack) track); + } + stateProvider.getLocalTracks().put(id, track); + + ConstraintsMap track_ = new ConstraintsMap(); + String kind = track.kind(); + + track_.putBoolean("enabled", track.enabled()); + 
track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", track.state().toString()); + track_.putBoolean("remote", false); + + if (track instanceof AudioTrack) { + audioTracks.pushMap(track_); + } else { + videoTracks.pushMap(track_); + } + } - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); + String streamId = mediaStream.getId(); - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); + Log.d(TAG, "MediaStream id: " + streamId); + stateProvider.getLocalStreams().put(streamId, mediaStream); + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); } - } - - String streamId = mediaStream.getId(); - - Log.d(TAG, "MediaStream id: " + streamId); - stateProvider.getLocalStreams().put(streamId, mediaStream); - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", videoTracks.toArrayList()); - result.success(successResult.toMap()); - } - }); + }); } /** @@ -579,9 +549,9 @@ private void getUserMedia( // If we fail to create either, destroy the other one and fail. 
if ((grantedPermissions.contains(PERMISSION_AUDIO) - && (tracks[0] = getUserAudio(constraints)) == null) + && (tracks[0] = getUserAudio(constraints)) == null) || (grantedPermissions.contains(PERMISSION_VIDEO) - && (tracks[1] = getUserVideo(constraints)) == null)) { + && (tracks[1] = getUserVideo(constraints)) == null)) { for (MediaStreamTrack track : tracks) { if (track != null) { track.dispose(); @@ -592,9 +562,7 @@ private void getUserMedia( // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "GetUserMediaFailed", - "Failed to create new track", null); + result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); return; } @@ -644,17 +612,14 @@ private void getUserMedia( result.success(successResult.toMap()); } - private VideoTrack getUserVideo(ConstraintsMap constraints) { ConstraintsMap videoConstraintsMap = null; ConstraintsMap videoConstraintsMandatory = null; if (constraints.getType("video") == ObjectType.Map) { videoConstraintsMap = constraints.getMap("video"); if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") - == ObjectType.Map) { - videoConstraintsMandatory - = videoConstraintsMap.getMap("mandatory"); + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); } } @@ -663,7 +628,8 @@ private VideoTrack getUserVideo(ConstraintsMap constraints) { // NOTE: to support Camera2, the device should: // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP // 2. 
all camera support level should greater than LEGACY - // see: https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL + // see: + // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL // TODO Enable camera2 enumerator CameraEnumerator cameraEnumerator; @@ -676,12 +642,10 @@ private VideoTrack getUserVideo(ConstraintsMap constraints) { } String facingMode = getFacingMode(videoConstraintsMap); - boolean isFacing - = facingMode == null || !facingMode.equals("environment"); + boolean isFacing = facingMode == null || !facingMode.equals("environment"); String sourceId = getSourceIdConstraint(videoConstraintsMap); - VideoCapturer videoCapturer - = createVideoCapturer(cameraEnumerator, isFacing, sourceId); + VideoCapturer videoCapturer = createVideoCapturer(cameraEnumerator, isFacing, sourceId); if (videoCapturer == null) { return null; @@ -690,24 +654,24 @@ private VideoTrack getUserVideo(ConstraintsMap constraints) { PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); VideoSource videoSource = pcFactory.createVideoSource(false); String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper - .create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer - .initialize(surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); // Fall back to defaults if keys are missing. - int width - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") - ? 
videoConstraintsMandatory.getInt("minWidth") - : DEFAULT_WIDTH; - int height - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") - ? videoConstraintsMandatory.getInt("minHeight") - : DEFAULT_HEIGHT; - int fps - = videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") - ? videoConstraintsMandatory.getInt("minFrameRate") - : DEFAULT_FPS; + int width = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") + ? videoConstraintsMandatory.getInt("minWidth") + : DEFAULT_WIDTH; + int height = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") + ? videoConstraintsMandatory.getInt("minHeight") + : DEFAULT_HEIGHT; + int fps = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") + ? videoConstraintsMandatory.getInt("minFrameRate") + : DEFAULT_FPS; videoCapturer.startCapture(width, height, fps); @@ -738,42 +702,43 @@ private void requestPermissions( final ArrayList permissions, final Callback successCallback, final Callback errorCallback) { - PermissionUtils.Callback callback = (permissions_, grantResults) -> { - List grantedPermissions = new ArrayList<>(); - List deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < permissions_.length; ++i) { - String permission = permissions_[i]; - int grantResult = grantResults[i]; - - if (grantResult == PackageManager.PERMISSION_GRANTED) { - grantedPermissions.add(permission); - } else { - deniedPermissions.add(permission); - } - } + PermissionUtils.Callback callback = + (permissions_, grantResults) -> { + List grantedPermissions = new ArrayList<>(); + List deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < permissions_.length; ++i) { + String permission = permissions_[i]; + int grantResult = grantResults[i]; + + if (grantResult == PackageManager.PERMISSION_GRANTED) { + grantedPermissions.add(permission); + } else { + deniedPermissions.add(permission); + } + 
} - // Success means that all requested permissions were granted. - for (String p : permissions) { - if (!grantedPermissions.contains(p)) { - // According to step 6 of the getUserMedia() algorithm - // "if the result is denied, jump to the step Permission - // Failure." - errorCallback.invoke(deniedPermissions); - return; - } - } - successCallback.invoke(grantedPermissions); - }; + // Success means that all requested permissions were granted. + for (String p : permissions) { + if (!grantedPermissions.contains(p)) { + // According to step 6 of the getUserMedia() algorithm + // "if the result is denied, jump to the step Permission + // Failure." + errorCallback.invoke(deniedPermissions); + return; + } + } + successCallback.invoke(grantedPermissions); + }; if (VERSION.SDK_INT >= VERSION_CODES.M) { final Activity activity = stateProvider.getActivity(); if (activity != null) { PermissionUtils.requestPermissions( - activity, - permissions.toArray(new String[permissions.size()]), - callback); + activity, permissions.toArray(new String[permissions.size()]), callback); } + } else { + successCallback.invoke(permissions); } } @@ -784,19 +749,19 @@ void switchCamera(String id, Result result) { return; } - CameraVideoCapturer cameraVideoCapturer - = (CameraVideoCapturer) videoCapturer; - cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { - @Override - public void onCameraSwitchDone(boolean b) { - result.success(b); - } + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera( + new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + result.success(b); + } - @Override - public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); - } - }); + @Override + public void onCameraSwitchError(String s) { + result.error("Switching camera failed", s, null); + } + }); } /** @@ -806,9 +771,10 @@ public void 
onCameraSwitchError(String s) { * @param videoTrack to record or null if only audio needed * @param audioChannel channel for recording or null * @throws Exception lot of different exceptions, pass back to dart layer to print them at least - **/ - void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, - @Nullable AudioChannel audioChannel) throws Exception { + */ + void startRecordingToFile( + String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) + throws Exception { AudioSamplesInterceptor interceptor = null; if (audioChannel == AudioChannel.INPUT) { interceptor = inputSamplesInterceptor; @@ -834,7 +800,8 @@ void stopRecording(Integer id) { values.put(MediaStore.Video.Media.TITLE, file.getName()); values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); - applicationContext.getContentResolver() + applicationContext + .getContentResolver() .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); } } @@ -852,12 +819,14 @@ void hasTorch(String trackId, Result result) { CameraDevice cameraDevice; try { - Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, - "currentSession"); - manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, videoCapturer, - "cameraManager"); - cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, - "cameraDevice"); + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); @@ -867,8 
+836,8 @@ void hasTorch(String trackId, Result result) { boolean flashIsAvailable; try { - CameraCharacteristics characteristics = manager - .getCameraCharacteristics(cameraDevice.getId()); + CameraCharacteristics characteristics = + manager.getCameraCharacteristics(cameraDevice.getId()); flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); } catch (CameraAccessException e) { // Should never happen since we are already accessing the camera @@ -883,8 +852,9 @@ void hasTorch(String trackId, Result result) { Camera camera; try { - Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, - "currentSession"); + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed @@ -896,8 +866,8 @@ void hasTorch(String trackId, Result result) { Parameters params = camera.getParameters(); List supportedModes = params.getSupportedFlashModes(); - result.success((supportedModes == null) ? false - : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); + result.success( + (supportedModes == null) ? 
false : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); return; } @@ -922,20 +892,23 @@ void setTorch(String trackId, boolean torch, Result result) { Handler cameraThreadHandler; try { - Object session = getPrivateProperty(Camera2Capturer.class.getSuperclass(), videoCapturer, - "currentSession"); - CameraManager manager = (CameraManager) getPrivateProperty(Camera2Capturer.class, - videoCapturer, "cameraManager"); - captureSession = (CameraCaptureSession) getPrivateProperty(session.getClass(), session, - "captureSession"); - cameraDevice = (CameraDevice) getPrivateProperty(session.getClass(), session, - "cameraDevice"); - captureFormat = (CaptureFormat) getPrivateProperty(session.getClass(), session, - "captureFormat"); + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + CameraManager manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = (Handler) getPrivateProperty(session.getClass(), session, - "cameraThreadHandler"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); @@ -944,19 +917,22 @@ void setTorch(String trackId, boolean torch, Result result) { } try { - final CaptureRequest.Builder 
captureRequestBuilder = cameraDevice - .createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set(CaptureRequest.FLASH_MODE, + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>(captureFormat.framerate.min / fpsUnitFactor, + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, captureFormat.framerate.max / fpsUnitFactor)); captureRequestBuilder.set( CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); captureRequestBuilder.addTarget(surface); - captureSession - .setRepeatingRequest(captureRequestBuilder.build(), null, cameraThreadHandler); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); } catch (CameraAccessException e) { // Should never happen since we are already accessing the camera throw new RuntimeException(e); @@ -969,8 +945,9 @@ void setTorch(String trackId, boolean torch, Result result) { if (videoCapturer instanceof Camera1Capturer) { Camera camera; try { - Object session = getPrivateProperty(Camera1Capturer.class.getSuperclass(), videoCapturer, - "currentSession"); + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java index 7359b741ab..54da24c1cd 
100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java @@ -5,12 +5,14 @@ import android.app.FragmentTransaction; import android.content.pm.PackageManager; import android.os.Build; +import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.ResultReceiver; import androidx.annotation.RequiresApi; +import androidx.core.content.ContextCompat; import java.util.ArrayList; /** @@ -99,7 +101,6 @@ private static void requestPermissions( } } - @RequiresApi(api = VERSION_CODES.M) public static void requestPermissions( final Activity activity, final String[] permissions, @@ -143,7 +144,7 @@ public interface Callback { * using a ResultReceiver. */ public static class RequestPermissionsFragment extends Fragment { - @RequiresApi(api = VERSION_CODES.M) +// @RequiresApi(api = VERSION_CODES.M) private void checkSelfPermissions(boolean requestPermissions) { // Figure out which of the requested permissions are actually denied // because we do not want to ask about the granted permissions @@ -236,7 +237,9 @@ public void onRequestPermissionsResult( public void onResume() { super.onResume(); - checkSelfPermissions(/* requestPermissions */ true); - } + if (VERSION.SDK_INT >= VERSION_CODES.M) { + checkSelfPermissions(/* requestPermissions */ true); + } + } } } From e114a8a4848ac45d20132639fc1c36ab0ee65945 Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sat, 13 Jun 2020 16:56:14 +0100 Subject: [PATCH 5/6] Reformat & document permission shortcut on pre-M devices --- .../webrtc/DataChannelObserver.java | 157 +- .../cloudwebrtc/webrtc/GetUserMediaImpl.java | 1700 +++++++++-------- .../webrtc/utils/PermissionUtils.java | 395 ++-- 3 files changed, 1115 insertions(+), 1137 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java 
b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java index 49b25dcad6..108897fee5 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java @@ -2,93 +2,96 @@ import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsMap; + +import org.webrtc.DataChannel; + +import java.nio.charset.Charset; + import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; -import java.nio.charset.Charset; -import org.webrtc.DataChannel; class DataChannelObserver implements DataChannel.Observer, EventChannel.StreamHandler { - private final int mId; - private final DataChannel mDataChannel; - - private EventChannel eventChannel; - private EventChannel.EventSink eventSink; - - DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, int id, - DataChannel dataChannel) { - mId = id; - mDataChannel = dataChannel; - eventChannel = - new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + id); - eventChannel.setStreamHandler(this); - } - - private String dataChannelStateString(DataChannel.State dataChannelState) { - switch (dataChannelState) { - case CONNECTING: - return "connecting"; - case OPEN: - return "open"; - case CLOSING: - return "closing"; - case CLOSED: - return "closed"; + private final int mId; + private final DataChannel mDataChannel; + + private EventChannel eventChannel; + private EventChannel.EventSink eventSink; + + DataChannelObserver(BinaryMessenger messenger, String peerConnectionId, int id, + DataChannel dataChannel) { + mId = id; + mDataChannel = dataChannel; + eventChannel = + new EventChannel(messenger, "FlutterWebRTC/dataChannelEvent" + peerConnectionId + id); + eventChannel.setStreamHandler(this); + } + + private String dataChannelStateString(DataChannel.State dataChannelState) { + switch (dataChannelState) { + case CONNECTING: + return 
"connecting"; + case OPEN: + return "open"; + case CLOSING: + return "closing"; + case CLOSED: + return "closed"; + } + return ""; + } + + @Override + public void onListen(Object o, EventChannel.EventSink sink) { + eventSink = new AnyThreadSink(sink); + } + + @Override + public void onCancel(Object o) { + eventSink = null; } - return ""; - } - - @Override - public void onListen(Object o, EventChannel.EventSink sink) { - eventSink = new AnyThreadSink(sink); - } - - @Override - public void onCancel(Object o) { - eventSink = null; - } - - @Override - public void onBufferedAmountChange(long amount) { - } - - @Override - public void onStateChange() { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "dataChannelStateChanged"); - params.putInt("id", mDataChannel.id()); - params.putString("state", dataChannelStateString(mDataChannel.state())); - sendEvent(params); - } - - @Override - public void onMessage(DataChannel.Buffer buffer) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "dataChannelReceiveMessage"); - params.putInt("id", mDataChannel.id()); - - byte[] bytes; - if (buffer.data.hasArray()) { - bytes = buffer.data.array(); - } else { - bytes = new byte[buffer.data.remaining()]; - buffer.data.get(bytes); + + @Override + public void onBufferedAmountChange(long amount) { } - if (buffer.binary) { - params.putString("type", "binary"); - params.putByte("data", bytes); - } else { - params.putString("type", "text"); - params.putString("data", new String(bytes, Charset.forName("UTF-8"))); + @Override + public void onStateChange() { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "dataChannelStateChanged"); + params.putInt("id", mDataChannel.id()); + params.putString("state", dataChannelStateString(mDataChannel.state())); + sendEvent(params); } - sendEvent(params); - } + @Override + public void onMessage(DataChannel.Buffer buffer) { + ConstraintsMap params = new ConstraintsMap(); + 
params.putString("event", "dataChannelReceiveMessage"); + params.putInt("id", mDataChannel.id()); + + byte[] bytes; + if (buffer.data.hasArray()) { + bytes = buffer.data.array(); + } else { + bytes = new byte[buffer.data.remaining()]; + buffer.data.get(bytes); + } + + if (buffer.binary) { + params.putString("type", "binary"); + params.putByte("data", bytes); + } else { + params.putString("type", "text"); + params.putString("data", new String(bytes, Charset.forName("UTF-8"))); + } + + sendEvent(params); + } - private void sendEvent(ConstraintsMap params) { - if (eventSink != null) { - eventSink.success(params.toMap()); + private void sendEvent(ConstraintsMap params) { + if (eventSink != null) { + eventSink.success(params.toMap()); + } } - } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index aa23346888..8d94262ec5 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -31,8 +31,10 @@ import android.util.SparseArray; import android.view.Surface; import android.view.WindowManager; + import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; + import com.cloudwebrtc.webrtc.record.AudioChannel; import com.cloudwebrtc.webrtc.record.AudioSamplesInterceptor; import com.cloudwebrtc.webrtc.record.MediaRecorderImpl; @@ -44,13 +46,7 @@ import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils; import com.cloudwebrtc.webrtc.utils.ObjectType; import com.cloudwebrtc.webrtc.utils.PermissionUtils; -import io.flutter.plugin.common.MethodChannel.Result; -import java.io.File; -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; + import org.webrtc.AudioSource; import org.webrtc.AudioTrack; import org.webrtc.Camera1Capturer; @@ -71,927 +67,939 @@ import org.webrtc.VideoTrack; import 
org.webrtc.audio.JavaAudioDeviceModule; +import java.io.File; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import io.flutter.plugin.common.MethodChannel.Result; + /** * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce * complexity and to (somewhat) separate concerns. */ class GetUserMediaImpl { - private static final int DEFAULT_WIDTH = 1280; - private static final int DEFAULT_HEIGHT = 720; - private static final int DEFAULT_FPS = 30; - - private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; - private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; - private static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; - private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; - private static final String GRANT_RESULTS = "GRANT_RESULT"; - private static final String PERMISSIONS = "PERMISSION"; - private static final String PROJECTION_DATA = "PROJECTION_DATA"; - private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; - private static final String REQUEST_CODE = "REQUEST_CODE"; - - static final String TAG = FlutterWebRTCPlugin.TAG; - - private final Map mVideoCapturers = new HashMap<>(); - - private final StateProvider stateProvider; - private final Context applicationContext; - - static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; - private MediaProjectionManager mProjectionManager = null; - private static MediaProjection sMediaProjection = null; - - final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); - private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; - JavaAudioDeviceModule audioDeviceModule; - private final SparseArray mediaRecorders = new SparseArray<>(); - - public void screenRequestPremissions(ResultReceiver resultReceiver) { - final Activity activity = stateProvider.getActivity(); - if 
(activity == null) { - // Activity went away, nothing we can do. - return; - } + private static final int DEFAULT_WIDTH = 1280; + private static final int DEFAULT_HEIGHT = 720; + private static final int DEFAULT_FPS = 30; + + private static final String PERMISSION_AUDIO = Manifest.permission.RECORD_AUDIO; + private static final String PERMISSION_VIDEO = Manifest.permission.CAMERA; + private static final String PERMISSION_SCREEN = "android.permission.MediaProjection"; + private static int CAPTURE_PERMISSION_REQUEST_CODE = 1; + private static final String GRANT_RESULTS = "GRANT_RESULT"; + private static final String PERMISSIONS = "PERMISSION"; + private static final String PROJECTION_DATA = "PROJECTION_DATA"; + private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; + private static final String REQUEST_CODE = "REQUEST_CODE"; + + static final String TAG = FlutterWebRTCPlugin.TAG; + + private final Map mVideoCapturers = new HashMap<>(); + + private final StateProvider stateProvider; + private final Context applicationContext; + + static final int minAPILevel = Build.VERSION_CODES.LOLLIPOP; + private MediaProjectionManager mProjectionManager = null; + private static MediaProjection sMediaProjection = null; + + final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor(); + private OutputAudioSamplesInterceptor outputSamplesInterceptor = null; + JavaAudioDeviceModule audioDeviceModule; + private final SparseArray mediaRecorders = new SparseArray<>(); + + public void screenRequestPremissions(ResultReceiver resultReceiver) { + final Activity activity = stateProvider.getActivity(); + if (activity == null) { + // Activity went away, nothing we can do. 
+ return; + } - Bundle args = new Bundle(); - args.putParcelable(RESULT_RECEIVER, resultReceiver); - args.putInt(REQUEST_CODE, CAPTURE_PERMISSION_REQUEST_CODE); + Bundle args = new Bundle(); + args.putParcelable(RESULT_RECEIVER, resultReceiver); + args.putInt(REQUEST_CODE, CAPTURE_PERMISSION_REQUEST_CODE); - ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); - fragment.setArguments(args); + ScreenRequestPermissionsFragment fragment = new ScreenRequestPermissionsFragment(); + fragment.setArguments(args); - FragmentTransaction transaction = - activity - .getFragmentManager() - .beginTransaction() - .add(fragment, fragment.getClass().getName()); + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName()); - try { - transaction.commit(); - } catch (IllegalStateException ise) { + try { + transaction.commit(); + } catch (IllegalStateException ise) { - } - } - - public static class ScreenRequestPermissionsFragment extends Fragment { - - private ResultReceiver resultReceiver = null; - private int requestCode = 0; - private int resultCode = 0; - - private void checkSelfPermissions(boolean requestPermissions) { - if (resultCode != Activity.RESULT_OK) { - Activity activity = this.getActivity(); - Bundle args = getArguments(); - resultReceiver = args.getParcelable(RESULT_RECEIVER); - requestCode = args.getInt(REQUEST_CODE); - requestStart(activity, requestCode); - } + } } - public void requestStart(Activity activity, int requestCode) { - if (android.os.Build.VERSION.SDK_INT < minAPILevel) { - Log.w( - TAG, - "Can't run requestStart() due to a low API level. 
API level 21 or higher is required."); - return; - } else { - MediaProjectionManager mediaProjectionManager = - (MediaProjectionManager) activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE); - - // call for the projection manager - this.startActivityForResult( - mediaProjectionManager.createScreenCaptureIntent(), requestCode); - } - } + public static class ScreenRequestPermissionsFragment extends Fragment { - @Override - public void onActivityResult(int requestCode, int resultCode, Intent data) { - super.onActivityResult(requestCode, resultCode, data); - resultCode = resultCode; - String[] permissions; - if (resultCode != Activity.RESULT_OK) { - finish(); - Bundle resultData = new Bundle(); - resultData.putString(PERMISSIONS, PERMISSION_SCREEN); - resultData.putInt(GRANT_RESULTS, resultCode); - resultReceiver.send(requestCode, resultData); - return; - } - Bundle resultData = new Bundle(); - resultData.putString(PERMISSIONS, PERMISSION_SCREEN); - resultData.putInt(GRANT_RESULTS, resultCode); - resultData.putParcelable(PROJECTION_DATA, data); - resultReceiver.send(requestCode, resultData); - finish(); - } + private ResultReceiver resultReceiver = null; + private int requestCode = 0; + private int resultCode = 0; - private void finish() { - Activity activity = getActivity(); - if (activity != null) { - activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); - } - } + private void checkSelfPermissions(boolean requestPermissions) { + if (resultCode != Activity.RESULT_OK) { + Activity activity = this.getActivity(); + Bundle args = getArguments(); + resultReceiver = args.getParcelable(RESULT_RECEIVER); + requestCode = args.getInt(REQUEST_CODE); + requestStart(activity, requestCode); + } + } - @Override - public void onResume() { - super.onResume(); - checkSelfPermissions(/* requestPermissions */ true); - } - } - - GetUserMediaImpl(StateProvider stateProvider, Context applicationContext) { - this.stateProvider = stateProvider; - 
this.applicationContext = applicationContext; - } - - /** - * Includes default constraints set for the audio media type. - * - * @param audioConstraints MediaConstraints instance to be filled with the default - * constraints for audio media type. - */ - private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); - audioConstraints.optional.add(new MediaConstraints.KeyValuePair("echoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googDAEchoCancellation", "true")); - } - - /** - * Create video capturer via given facing mode - * - * @param enumerator a CameraEnumerator provided by webrtc it can be Camera1Enumerator or - * Camera2Enumerator - * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' - * is false - * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture null - * if not matched camera with specified facing mode. 
- */ - private VideoCapturer createVideoCapturer( - CameraEnumerator enumerator, boolean isFacing, String sourceId) { - VideoCapturer videoCapturer = null; - - // if sourceId given, use specified sourceId first - final String[] deviceNames = enumerator.getDeviceNames(); - if (sourceId != null) { - for (String name : deviceNames) { - if (name.equals(sourceId)) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); - if (videoCapturer != null) { - Log.d(TAG, "create user specified camera " + name + " succeeded"); - return videoCapturer; - } else { - Log.d(TAG, "create user specified camera " + name + " failed"); - break; // fallback to facing mode - } - } - } - } + public void requestStart(Activity activity, int requestCode) { + if (android.os.Build.VERSION.SDK_INT < minAPILevel) { + Log.w( + TAG, + "Can't run requestStart() due to a low API level. API level 21 or higher is required."); + return; + } else { + MediaProjectionManager mediaProjectionManager = + (MediaProjectionManager) activity.getSystemService(Context.MEDIA_PROJECTION_SERVICE); - // otherwise, use facing mode - String facingStr = isFacing ? "front" : "back"; - for (String name : deviceNames) { - if (enumerator.isFrontFacing(name) == isFacing) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); - if (videoCapturer != null) { - Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); - return videoCapturer; - } else { - Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); + // call for the projection manager + this.startActivityForResult( + mediaProjectionManager.createScreenCaptureIntent(), requestCode); + } } - } - } - // should we fallback to available camera automatically? - return videoCapturer; - } - - /** - * Retrieves "facingMode" constraint value. - * - * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument. 
- * @return String value of "facingMode" constraints in "GUM" or null if not specified. - */ - private String getFacingMode(ConstraintsMap mediaConstraints) { - return mediaConstraints == null ? null : mediaConstraints.getString("facingMode"); - } - - /** - * Retrieves "sourceId" constraint value. - * - * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument - * @return String value of "sourceId" optional "GUM" constraint or null if not specified. - */ - private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { - if (mediaConstraints != null - && mediaConstraints.hasKey("optional") - && mediaConstraints.getType("optional") == ObjectType.Array) { - ConstraintsArray optional = mediaConstraints.getArray("optional"); - - for (int i = 0, size = optional.size(); i < size; i++) { - if (optional.getType(i) == ObjectType.Map) { - ConstraintsMap option = optional.getMap(i); - - if (option.hasKey("sourceId") && option.getType("sourceId") == ObjectType.String) { - return option.getString("sourceId"); - } - } - } - } - return null; - } + @Override + public void onActivityResult(int requestCode, int resultCode, Intent data) { + super.onActivityResult(requestCode, resultCode, data); + resultCode = resultCode; + String[] permissions; + if (resultCode != Activity.RESULT_OK) { + finish(); + Bundle resultData = new Bundle(); + resultData.putString(PERMISSIONS, PERMISSION_SCREEN); + resultData.putInt(GRANT_RESULTS, resultCode); + resultReceiver.send(requestCode, resultData); + return; + } + Bundle resultData = new Bundle(); + resultData.putString(PERMISSIONS, PERMISSION_SCREEN); + resultData.putInt(GRANT_RESULTS, resultCode); + resultData.putParcelable(PROJECTION_DATA, data); + resultReceiver.send(requestCode, resultData); + finish(); + } - private AudioTrack getUserAudio(ConstraintsMap constraints) { - MediaConstraints audioConstraints; - if (constraints.getType("audio") == ObjectType.Boolean) { - audioConstraints = new MediaConstraints(); 
- addDefaultAudioConstraints(audioConstraints); - } else { - audioConstraints = MediaConstraintsUtils.parseMediaConstraints(constraints.getMap("audio")); - } + private void finish() { + Activity activity = getActivity(); + if (activity != null) { + activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); + } + } - Log.i(TAG, "getUserMedia(audio): " + audioConstraints); - - String trackId = stateProvider.getNextTrackUUID(); - PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); - AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); - - return pcFactory.createAudioTrack(trackId, audioSource); - } - - /** - * Implements {@code getUserMedia} without knowledge whether the necessary permissions have - * already been granted. If the necessary permissions have not been granted yet, they will be - * requested. - */ - void getUserMedia( - final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - - // TODO: change getUserMedia constraints format to support new syntax - // constraint format seems changed, and there is no mandatory any more. 
- // and has a new syntax/attrs to specify resolution - // should change `parseConstraints()` according - // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints - - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { - videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); - } + @Override + public void onResume() { + super.onResume(); + checkSelfPermissions(/* requestPermissions */ true); + } } - final ArrayList requestPermissions = new ArrayList<>(); - - if (constraints.hasKey("audio")) { - switch (constraints.getType("audio")) { - case Boolean: - if (constraints.getBoolean("audio")) { - requestPermissions.add(PERMISSION_AUDIO); - } - break; - case Map: - requestPermissions.add(PERMISSION_AUDIO); - break; - default: - break; - } - } + GetUserMediaImpl(StateProvider stateProvider, Context applicationContext) { + this.stateProvider = stateProvider; + this.applicationContext = applicationContext; + } + + /** + * Includes default constraints set for the audio media type. + * + * @param audioConstraints MediaConstraints instance to be filled with the default + * constraints for audio media type. 
+ */ + private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); + audioConstraints.optional.add(new MediaConstraints.KeyValuePair("echoCancellation", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); + audioConstraints.optional.add( + new MediaConstraints.KeyValuePair("googDAEchoCancellation", "true")); + } + + /** + * Create video capturer via given facing mode + * + * @param enumerator a CameraEnumerator provided by webrtc it can be Camera1Enumerator or + * Camera2Enumerator + * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' + * is false + * @param sourceId (String) use this sourceId and ignore facing mode if specified. + * @return VideoCapturer can invoke with startCapture/stopCapture null + * if not matched camera with specified facing mode. 
+ */ + private VideoCapturer createVideoCapturer( + CameraEnumerator enumerator, boolean isFacing, String sourceId) { + VideoCapturer videoCapturer = null; + + // if sourceId given, use specified sourceId first + final String[] deviceNames = enumerator.getDeviceNames(); + if (sourceId != null) { + for (String name : deviceNames) { + if (name.equals(sourceId)) { + videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + if (videoCapturer != null) { + Log.d(TAG, "create user specified camera " + name + " succeeded"); + return videoCapturer; + } else { + Log.d(TAG, "create user specified camera " + name + " failed"); + break; // fallback to facing mode + } + } + } + } - if (constraints.hasKey("video")) { - switch (constraints.getType("video")) { - case Boolean: - if (constraints.getBoolean("video")) { - requestPermissions.add(PERMISSION_VIDEO); - } - break; - case Map: - requestPermissions.add(PERMISSION_VIDEO); - break; - default: - break; - } - } + // otherwise, use facing mode + String facingStr = isFacing ? "front" : "back"; + for (String name : deviceNames) { + if (enumerator.isFrontFacing(name) == isFacing) { + videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + if (videoCapturer != null) { + Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); + return videoCapturer; + } else { + Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); + } + } + } + // should we fallback to available camera automatically? + return videoCapturer; + } + + /** + * Retrieves "facingMode" constraint value. + * + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument. + * @return String value of "facingMode" constraints in "GUM" or null if not specified. + */ + private String getFacingMode(ConstraintsMap mediaConstraints) { + return mediaConstraints == null ? null : mediaConstraints.getString("facingMode"); + } + + /** + * Retrieves "sourceId" constraint value. 
+ * + * @param mediaConstraints a ConstraintsMap which represents "GUM" constraints argument + * @return String value of "sourceId" optional "GUM" constraint or null if not specified. + */ + private String getSourceIdConstraint(ConstraintsMap mediaConstraints) { + if (mediaConstraints != null + && mediaConstraints.hasKey("optional") + && mediaConstraints.getType("optional") == ObjectType.Array) { + ConstraintsArray optional = mediaConstraints.getArray("optional"); + + for (int i = 0, size = optional.size(); i < size; i++) { + if (optional.getType(i) == ObjectType.Map) { + ConstraintsMap option = optional.getMap(i); + + if (option.hasKey("sourceId") && option.getType("sourceId") == ObjectType.String) { + return option.getString("sourceId"); + } + } + } + } - // According to step 2 of the getUserMedia() algorithm, - // requestedMediaTypes is the set of media types in constraints with - // either a dictionary value or a value of "true". - // According to step 3 of the getUserMedia() algorithm, if - // requestedMediaTypes is the empty set, the method invocation fails - // with a TypeError. - if (requestPermissions.isEmpty()) { - result.error("TypeError", "constraints requests no media types", null); - return; + return null; } - requestPermissions( - requestPermissions, - /* successCallback */ new Callback() { - @Override - public void invoke(Object... args) { - List grantedPermissions = (List) args[0]; - - getUserMedia(constraints, result, mediaStream, grantedPermissions); - } - }, - /* errorCallback */ new Callback() { - @Override - public void invoke(Object... args) { - // According to step 10 Permission Failure of the - // getUserMedia() algorithm, if the user has denied - // permission, fail "with a new DOMException object whose - // name attribute has the value NotAllowedError." 
- result.error("DOMException", "NotAllowedError", null); - } - }); - } - - void getDisplayMedia( - final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { - videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); - } + private AudioTrack getUserAudio(ConstraintsMap constraints) { + MediaConstraints audioConstraints; + if (constraints.getType("audio") == ObjectType.Boolean) { + audioConstraints = new MediaConstraints(); + addDefaultAudioConstraints(audioConstraints); + } else { + audioConstraints = MediaConstraintsUtils.parseMediaConstraints(constraints.getMap("audio")); + } + + Log.i(TAG, "getUserMedia(audio): " + audioConstraints); + + String trackId = stateProvider.getNextTrackUUID(); + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + AudioSource audioSource = pcFactory.createAudioSource(audioConstraints); + + return pcFactory.createAudioTrack(trackId, audioSource); } - final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; + /** + * Implements {@code getUserMedia} without knowledge whether the necessary permissions have + * already been granted. If the necessary permissions have not been granted yet, they will be + * requested. + */ + void getUserMedia( + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { - screenRequestPremissions( - new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult(int requestCode, Bundle resultData) { + // TODO: change getUserMedia constraints format to support new syntax + // constraint format seems changed, and there is no mandatory any more. 
+ // and has a new syntax/attrs to specify resolution + // should change `parseConstraints()` according + // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints - /* Create ScreenCapture */ - int resultCode = resultData.getInt(GRANT_RESULTS); - Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; - if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); - return; + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); } + } - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; - VideoCapturer videoCapturer = null; - videoCapturer = - new ScreenCapturerAndroid( - mediaProjectionData, - new MediaProjection.Callback() { - @Override - public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); - } - }); - if (videoCapturer == null) { - result.error( - /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); - return; + final ArrayList requestPermissions = new ArrayList<>(); + + if (constraints.hasKey("audio")) { + switch (constraints.getType("audio")) { + case Boolean: + if (constraints.getBoolean("audio")) { + requestPermissions.add(PERMISSION_AUDIO); + } + break; + case Map: + requestPermissions.add(PERMISSION_AUDIO); + break; + default: + break; } + } - PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); - VideoSource videoSource = pcFactory.createVideoSource(true); - - String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = - 
SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize( - surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + if (constraints.hasKey("video")) { + switch (constraints.getType("video")) { + case Boolean: + if (constraints.getBoolean("video")) { + requestPermissions.add(PERMISSION_VIDEO); + } + break; + case Map: + requestPermissions.add(PERMISSION_VIDEO); + break; + default: + break; + } + } - WindowManager wm = - (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); + // According to step 2 of the getUserMedia() algorithm, + // requestedMediaTypes is the set of media types in constraints with + // either a dictionary value or a value of "true". + // According to step 3 of the getUserMedia() algorithm, if + // requestedMediaTypes is the empty set, the method invocation fails + // with a TypeError. + if (requestPermissions.isEmpty()) { + result.error("TypeError", "constraints requests no media types", null); + return; + } - int width = wm.getDefaultDisplay().getWidth(); - int height = wm.getDefaultDisplay().getHeight(); - int fps = DEFAULT_FPS; + /// Only systems pre-M, no additional permission request is needed. + if (VERSION.SDK_INT < VERSION_CODES.M) { + getUserMedia(constraints, result, mediaStream, requestPermissions); + return; + } - videoCapturer.startCapture(width, height, fps); - Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + requestPermissions( + requestPermissions, + /* successCallback */ new Callback() { + @Override + public void invoke(Object... args) { + List grantedPermissions = (List) args[0]; + + getUserMedia(constraints, result, mediaStream, grantedPermissions); + } + }, + /* errorCallback */ new Callback() { + @Override + public void invoke(Object... 
args) { + // According to step 10 Permission Failure of the + // getUserMedia() algorithm, if the user has denied + // permission, fail "with a new DOMException object whose + // name attribute has the value NotAllowedError." + result.error("DOMException", "NotAllowedError", null); + } + }); + } + + void getDisplayMedia( + final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) { + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; + + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); + } + } - String trackId = stateProvider.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); + final ConstraintsMap videoConstraintsMandatory2 = videoConstraintsMandatory; + + screenRequestPremissions( + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int requestCode, Bundle resultData) { + + /* Create ScreenCapture */ + int resultCode = resultData.getInt(GRANT_RESULTS); + Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); + + if (resultCode != Activity.RESULT_OK) { + result.error(null, "User didn't give permission to capture the screen.", null); + return; + } + + MediaStreamTrack[] tracks = new MediaStreamTrack[1]; + VideoCapturer videoCapturer = null; + videoCapturer = + new ScreenCapturerAndroid( + mediaProjectionData, + new MediaProjection.Callback() { + @Override + public void onStop() { + Log.e(TAG, "User revoked permission to capture the screen."); + result.error(null, "User revoked permission to capture the screen.", null); + } + }); + if (videoCapturer == null) { + result.error( + /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); + return; + } + + 
PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(true); + + String threadName = Thread.currentThread().getName(); + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + + WindowManager wm = + (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE); + + int width = wm.getDefaultDisplay().getWidth(); + int height = wm.getDefaultDisplay().getHeight(); + int fps = DEFAULT_FPS; + + videoCapturer.startCapture(width, height, fps); + Log.d(TAG, "ScreenCapturerAndroid.startCapture: " + width + "x" + height + "@" + fps); + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, videoCapturer); + + tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); + + for (MediaStreamTrack track : tracks) { + if (track == null) { + continue; + } + + String id = track.id(); + + if (track instanceof AudioTrack) { + mediaStream.addTrack((AudioTrack) track); + } else { + mediaStream.addTrack((VideoTrack) track); + } + stateProvider.getLocalTracks().put(id, track); + + ConstraintsMap track_ = new ConstraintsMap(); + String kind = track.kind(); + + track_.putBoolean("enabled", track.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", track.state().toString()); + track_.putBoolean("remote", false); + + if (track instanceof AudioTrack) { + audioTracks.pushMap(track_); + } else { + videoTracks.pushMap(track_); + } + } + + String streamId = mediaStream.getId(); + + Log.d(TAG, "MediaStream id: " + streamId); + 
stateProvider.getLocalStreams().put(streamId, mediaStream); + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); + } + }); + } + + /** + * Implements {@code getUserMedia} with the knowledge that the necessary permissions have already + * been granted. If the necessary permissions have not been granted yet, they will NOT be + * requested. + */ + private void getUserMedia( + ConstraintsMap constraints, + Result result, + MediaStream mediaStream, + List grantedPermissions) { + MediaStreamTrack[] tracks = new MediaStreamTrack[2]; + + // If we fail to create either, destroy the other one and fail. + if ((grantedPermissions.contains(PERMISSION_AUDIO) + && (tracks[0] = getUserAudio(constraints)) == null) + || (grantedPermissions.contains(PERMISSION_VIDEO) + && (tracks[1] = getUserVideo(constraints)) == null)) { + for (MediaStreamTrack track : tracks) { + if (track != null) { + track.dispose(); + } + } - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + // XXX The following does not follow the getUserMedia() algorithm + // specified by + // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia + // with respect to distinguishing the various causes of failure. 
+ result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); + return; + } - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap successResult = new ConstraintsMap(); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + ConstraintsMap successResult = new ConstraintsMap(); - for (MediaStreamTrack track : tracks) { - if (track == null) { + for (MediaStreamTrack track : tracks) { + if (track == null) { continue; - } + } - String id = track.id(); + String id = track.id(); - if (track instanceof AudioTrack) { + if (track instanceof AudioTrack) { mediaStream.addTrack((AudioTrack) track); - } else { + } else { mediaStream.addTrack((VideoTrack) track); - } - stateProvider.getLocalTracks().put(id, track); + } + stateProvider.getLocalTracks().put(id, track); - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); + ConstraintsMap track_ = new ConstraintsMap(); + String kind = track.kind(); - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); + track_.putBoolean("enabled", track.enabled()); + track_.putString("id", id); + track_.putString("kind", kind); + track_.putString("label", kind); + track_.putString("readyState", track.state().toString()); + track_.putBoolean("remote", false); - if (track instanceof AudioTrack) { + if (track instanceof AudioTrack) { audioTracks.pushMap(track_); - } else { + } else { videoTracks.pushMap(track_); - } } + } - String streamId = mediaStream.getId(); - - Log.d(TAG, "MediaStream id: " + streamId); - stateProvider.getLocalStreams().put(streamId, mediaStream); - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", 
audioTracks.toArrayList()); - successResult.putArray("videoTracks", videoTracks.toArrayList()); - result.success(successResult.toMap()); - } - }); - } - - /** - * Implements {@code getUserMedia} with the knowledge that the necessary permissions have already - * been granted. If the necessary permissions have not been granted yet, they will NOT be - * requested. - */ - private void getUserMedia( - ConstraintsMap constraints, - Result result, - MediaStream mediaStream, - List grantedPermissions) { - MediaStreamTrack[] tracks = new MediaStreamTrack[2]; - - // If we fail to create either, destroy the other one and fail. - if ((grantedPermissions.contains(PERMISSION_AUDIO) - && (tracks[0] = getUserAudio(constraints)) == null) - || (grantedPermissions.contains(PERMISSION_VIDEO) - && (tracks[1] = getUserVideo(constraints)) == null)) { - for (MediaStreamTrack track : tracks) { - if (track != null) { - track.dispose(); - } - } - - // XXX The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. 
- result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); - return; - } + String streamId = mediaStream.getId(); - ConstraintsArray audioTracks = new ConstraintsArray(); - ConstraintsArray videoTracks = new ConstraintsArray(); - ConstraintsMap successResult = new ConstraintsMap(); - - for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } - - String id = track.id(); - - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - stateProvider.getLocalTracks().put(id, track); - - ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); - - track_.putBoolean("enabled", track.enabled()); - track_.putString("id", id); - track_.putString("kind", kind); - track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); - track_.putBoolean("remote", false); - - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } - } + Log.d(TAG, "MediaStream id: " + streamId); + stateProvider.getLocalStreams().put(streamId, mediaStream); - String streamId = mediaStream.getId(); - - Log.d(TAG, "MediaStream id: " + streamId); - stateProvider.getLocalStreams().put(streamId, mediaStream); - - successResult.putString("streamId", streamId); - successResult.putArray("audioTracks", audioTracks.toArrayList()); - successResult.putArray("videoTracks", videoTracks.toArrayList()); - result.success(successResult.toMap()); - } - - private VideoTrack getUserVideo(ConstraintsMap constraints) { - ConstraintsMap videoConstraintsMap = null; - ConstraintsMap videoConstraintsMandatory = null; - if (constraints.getType("video") == ObjectType.Map) { - videoConstraintsMap = constraints.getMap("video"); - if (videoConstraintsMap.hasKey("mandatory") - && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { - videoConstraintsMandatory = 
videoConstraintsMap.getMap("mandatory"); - } + successResult.putString("streamId", streamId); + successResult.putArray("audioTracks", audioTracks.toArrayList()); + successResult.putArray("videoTracks", videoTracks.toArrayList()); + result.success(successResult.toMap()); } - Log.i(TAG, "getUserMedia(video): " + videoConstraintsMap); - - // NOTE: to support Camera2, the device should: - // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP - // 2. all camera support level should greater than LEGACY - // see: - // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL - // TODO Enable camera2 enumerator - CameraEnumerator cameraEnumerator; - - if (Camera2Enumerator.isSupported(applicationContext)) { - Log.d(TAG, "Creating video capturer using Camera2 API."); - cameraEnumerator = new Camera2Enumerator(applicationContext); - } else { - Log.d(TAG, "Creating video capturer using Camera1 API."); - cameraEnumerator = new Camera1Enumerator(false); - } + private VideoTrack getUserVideo(ConstraintsMap constraints) { + ConstraintsMap videoConstraintsMap = null; + ConstraintsMap videoConstraintsMandatory = null; + if (constraints.getType("video") == ObjectType.Map) { + videoConstraintsMap = constraints.getMap("video"); + if (videoConstraintsMap.hasKey("mandatory") + && videoConstraintsMap.getType("mandatory") == ObjectType.Map) { + videoConstraintsMandatory = videoConstraintsMap.getMap("mandatory"); + } + } - String facingMode = getFacingMode(videoConstraintsMap); - boolean isFacing = facingMode == null || !facingMode.equals("environment"); - String sourceId = getSourceIdConstraint(videoConstraintsMap); + Log.i(TAG, "getUserMedia(video): " + videoConstraintsMap); - VideoCapturer videoCapturer = createVideoCapturer(cameraEnumerator, isFacing, sourceId); + // NOTE: to support Camera2, the device should: + // 1. Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP + // 2. 
all camera support level should greater than LEGACY + // see: + // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#INFO_SUPPORTED_HARDWARE_LEVEL + // TODO Enable camera2 enumerator + CameraEnumerator cameraEnumerator; - if (videoCapturer == null) { - return null; - } + if (Camera2Enumerator.isSupported(applicationContext)) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(applicationContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } - PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); - VideoSource videoSource = pcFactory.createVideoSource(false); - String threadName = Thread.currentThread().getName(); - SurfaceTextureHelper surfaceTextureHelper = - SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize( - surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - - // Fall back to defaults if keys are missing. - int width = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") - ? videoConstraintsMandatory.getInt("minWidth") - : DEFAULT_WIDTH; - int height = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") - ? videoConstraintsMandatory.getInt("minHeight") - : DEFAULT_HEIGHT; - int fps = - videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") - ? 
videoConstraintsMandatory.getInt("minFrameRate") - : DEFAULT_FPS; - - videoCapturer.startCapture(width, height, fps); - - String trackId = stateProvider.getNextTrackUUID(); - mVideoCapturers.put(trackId, videoCapturer); - - Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); - videoSource.adaptOutputFormat(width, height, fps); - - return pcFactory.createVideoTrack(trackId, videoSource); - } - - void removeVideoCapturer(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { - try { - videoCapturer.stopCapture(); - } catch (InterruptedException e) { - Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); - } finally { - videoCapturer.dispose(); - mVideoCapturers.remove(id); - } + String facingMode = getFacingMode(videoConstraintsMap); + boolean isFacing = facingMode == null || !facingMode.equals("environment"); + String sourceId = getSourceIdConstraint(videoConstraintsMap); + + VideoCapturer videoCapturer = createVideoCapturer(cameraEnumerator, isFacing, sourceId); + + if (videoCapturer == null) { + return null; + } + + PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); + VideoSource videoSource = pcFactory.createVideoSource(false); + String threadName = Thread.currentThread().getName(); + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); + videoCapturer.initialize( + surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + + // Fall back to defaults if keys are missing. + int width = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") + ? videoConstraintsMandatory.getInt("minWidth") + : DEFAULT_WIDTH; + int height = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") + ? 
videoConstraintsMandatory.getInt("minHeight") + : DEFAULT_HEIGHT; + int fps = + videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") + ? videoConstraintsMandatory.getInt("minFrameRate") + : DEFAULT_FPS; + + videoCapturer.startCapture(width, height, fps); + + String trackId = stateProvider.getNextTrackUUID(); + mVideoCapturers.put(trackId, videoCapturer); + + Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + fps); + videoSource.adaptOutputFormat(width, height, fps); + + return pcFactory.createVideoTrack(trackId, videoSource); } - } - - private void requestPermissions( - final ArrayList permissions, - final Callback successCallback, - final Callback errorCallback) { - PermissionUtils.Callback callback = - (permissions_, grantResults) -> { - List grantedPermissions = new ArrayList<>(); - List deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < permissions_.length; ++i) { - String permission = permissions_[i]; - int grantResult = grantResults[i]; - - if (grantResult == PackageManager.PERMISSION_GRANTED) { - grantedPermissions.add(permission); - } else { - deniedPermissions.add(permission); - } - } - - // Success means that all requested permissions were granted. - for (String p : permissions) { - if (!grantedPermissions.contains(p)) { - // According to step 6 of the getUserMedia() algorithm - // "if the result is denied, jump to the step Permission - // Failure." 
- errorCallback.invoke(deniedPermissions); - return; + + void removeVideoCapturer(String id) { + VideoCapturer videoCapturer = mVideoCapturers.get(id); + if (videoCapturer != null) { + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); + } finally { + videoCapturer.dispose(); + mVideoCapturers.remove(id); } - } - successCallback.invoke(grantedPermissions); - }; - - if (VERSION.SDK_INT >= VERSION_CODES.M) { - final Activity activity = stateProvider.getActivity(); - if (activity != null) { - PermissionUtils.requestPermissions( - activity, permissions.toArray(new String[permissions.size()]), callback); - } - } else { - successCallback.invoke(permissions); + } } - } - void switchCamera(String id, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + id, null); - return; + @RequiresApi(api = VERSION_CODES.M) + private void requestPermissions( + final ArrayList permissions, + final Callback successCallback, + final Callback errorCallback) { + PermissionUtils.Callback callback = + (permissions_, grantResults) -> { + List grantedPermissions = new ArrayList<>(); + List deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < permissions_.length; ++i) { + String permission = permissions_[i]; + int grantResult = grantResults[i]; + + if (grantResult == PackageManager.PERMISSION_GRANTED) { + grantedPermissions.add(permission); + } else { + deniedPermissions.add(permission); + } + } + + // Success means that all requested permissions were granted. + for (String p : permissions) { + if (!grantedPermissions.contains(p)) { + // According to step 6 of the getUserMedia() algorithm + // "if the result is denied, jump to the step Permission + // Failure." 
+ errorCallback.invoke(deniedPermissions); + return; + } + } + successCallback.invoke(grantedPermissions); + }; + + final Activity activity = stateProvider.getActivity(); + if (activity != null) { + PermissionUtils.requestPermissions( + activity, permissions.toArray(new String[permissions.size()]), callback); + } } - CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; - cameraVideoCapturer.switchCamera( - new CameraVideoCapturer.CameraSwitchHandler() { - @Override - public void onCameraSwitchDone(boolean b) { - result.success(b); - } - - @Override - public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); - } - }); - } - - /** - * Creates and starts recording of local stream to file - * - * @param path to the file for record - * @param videoTrack to record or null if only audio needed - * @param audioChannel channel for recording or null - * @throws Exception lot of different exceptions, pass back to dart layer to print them at least - */ - void startRecordingToFile( - String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) - throws Exception { - AudioSamplesInterceptor interceptor = null; - if (audioChannel == AudioChannel.INPUT) { - interceptor = inputSamplesInterceptor; - } else if (audioChannel == AudioChannel.OUTPUT) { - if (outputSamplesInterceptor == null) { - outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule); - } - interceptor = outputSamplesInterceptor; - } - MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor); - mediaRecorder.startRecording(new File(path)); - mediaRecorders.append(id, mediaRecorder); - } - - void stopRecording(Integer id) { - MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); - if (mediaRecorder != null) { - mediaRecorder.stopRecording(); - mediaRecorders.remove(id); - File file = mediaRecorder.getRecordFile(); - if (file != null) { - ContentValues values = new 
ContentValues(3); - values.put(MediaStore.Video.Media.TITLE, file.getName()); - values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); - values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); - applicationContext - .getContentResolver() - .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); - } - } - } + void switchCamera(String id, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(id); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + id, null); + return; + } - void hasTorch(String trackId, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; + CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; + cameraVideoCapturer.switchCamera( + new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + result.success(b); + } + + @Override + public void onCameraSwitchError(String s) { + result.error("Switching camera failed", s, null); + } + }); + } + + /** + * Creates and starts recording of local stream to file + * + * @param path to the file for record + * @param videoTrack to record or null if only audio needed + * @param audioChannel channel for recording or null + * @throws Exception lot of different exceptions, pass back to dart layer to print them at least + */ + void startRecordingToFile( + String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) + throws Exception { + AudioSamplesInterceptor interceptor = null; + if (audioChannel == AudioChannel.INPUT) { + interceptor = inputSamplesInterceptor; + } else if (audioChannel == AudioChannel.OUTPUT) { + if (outputSamplesInterceptor == null) { + outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule); + } + interceptor = outputSamplesInterceptor; + } + 
MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor); + mediaRecorder.startRecording(new File(path)); + mediaRecorders.append(id, mediaRecorder); + } + + void stopRecording(Integer id) { + MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); + if (mediaRecorder != null) { + mediaRecorder.stopRecording(); + mediaRecorders.remove(id); + File file = mediaRecorder.getRecordFile(); + if (file != null) { + ContentValues values = new ContentValues(3); + values.put(MediaStore.Video.Media.TITLE, file.getName()); + values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); + values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); + applicationContext + .getContentResolver() + .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); + } + } } - if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && videoCapturer instanceof Camera2Capturer) { - CameraManager manager; - CameraDevice cameraDevice; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - boolean flashIsAvailable; - try { - CameraCharacteristics characteristics = - manager.getCameraCharacteristics(cameraDevice.getId()); - flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(flashIsAvailable); - return; - } + 
void hasTorch(String trackId, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(trackId); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + trackId, null); + return; + } - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; - - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - Parameters params = camera.getParameters(); - List supportedModes = params.getSupportedFlashModes(); - - result.success( - (supportedModes == null) ? false : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); - return; - } + if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && videoCapturer instanceof Camera2Capturer) { + CameraManager manager; + CameraDevice cameraDevice; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); - } + boolean flashIsAvailable; + try { + 
CameraCharacteristics characteristics = + manager.getCameraCharacteristics(cameraDevice.getId()); + flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setTorch(String trackId, boolean torch, Result result) { - VideoCapturer videoCapturer = mVideoCapturers.get(trackId); - if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); - return; - } + result.success(flashIsAvailable); + return; + } - if (videoCapturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - CameraManager manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + 
e.className + "`", null); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(null); - return; - } + if (videoCapturer instanceof Camera1Capturer) { + Camera camera; + + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + Parameters params = camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); - if (videoCapturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely 
the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - torch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - camera.setParameters(params); - - result.success(null); - return; + result.success( + (supportedModes == null) ? false : supportedModes.contains(Parameters.FLASH_MODE_TORCH)); + return; + } + + Log.e(TAG, "[TORCH] Video capturer not compatible"); + result.error(null, "Video capturer not compatible", null); } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); - } - - private Object getPrivateProperty(Class klass, Object object, String fieldName) - throws NoSuchFieldWithNameException { - try { - Field field = klass.getDeclaredField(fieldName); - field.setAccessible(true); - return field.get(object); - } catch (NoSuchFieldException e) { - throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); - } catch (IllegalAccessException e) { - // Should never happen since we are calling `setAccessible(true)` - throw new RuntimeException(e); + @RequiresApi(api = VERSION_CODES.LOLLIPOP) + void setTorch(String trackId, boolean torch, Result result) { + VideoCapturer videoCapturer = mVideoCapturers.get(trackId); + if (videoCapturer == null) { + result.error(null, "Video capturer not found for id: " + trackId, null); + return; + } + + if (videoCapturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + CameraManager manager = + 
(CameraManager) + getPrivateProperty(Camera2Capturer.class, videoCapturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + torch ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + return; + } + + if (videoCapturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), videoCapturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); + result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + return; + } + + Log.e(TAG, "[TORCH] Video capturer not compatible"); + result.error(null, "Video capturer not compatible", null); + } + + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); + } } - } - private class NoSuchFieldWithNameException extends NoSuchFieldException { + private class NoSuchFieldWithNameException extends NoSuchFieldException { - String className; - String fieldName; + String className; + String fieldName; - NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { - super(e.getMessage()); - this.className = className; - this.fieldName = fieldName; + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; + } } - } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java index 54da24c1cd..c86d9c3a1c 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java @@ -5,241 +5,208 @@ import android.app.FragmentTransaction; import android.content.pm.PackageManager; import android.os.Build; -import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; 
import android.os.ResultReceiver; +import androidx.annotation.NonNull; import androidx.annotation.RequiresApi; -import androidx.core.content.ContextCompat; import java.util.ArrayList; -/** - * Helper module for dealing with dynamic permissions, introduced in Android M - * (API level 23). - */ +/** Helper module for dealing with dynamic permissions, introduced in Android M (API level 23). */ public class PermissionUtils { - /** - * Constants for internal fields in the Bundle exchanged between - * the activity requesting the permissions and the auxiliary activity we - * spawn for this purpose. - */ - private static final String GRANT_RESULTS = "GRANT_RESULT"; - private static final String PERMISSIONS = "PERMISSION"; - private static final String REQUEST_CODE = "REQUEST_CODE"; - private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; - - /** - * Incrementing counter for permission requests. Each request must have a - * unique numeric code. - */ - private static int requestCode; - - private static void requestPermissions( - Activity activity, - String[] permissions, - ResultReceiver resultReceiver) { - // Ask the Context whether we have already been granted the requested - // permissions. - int size = permissions.length; - int[] grantResults = new int[size]; - boolean permissionsGranted = true; - - for (int i = 0; i < size; ++i) { - int grantResult; - // No need to ask for permission on pre-Marshmallow - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) - grantResult = PackageManager.PERMISSION_GRANTED; - else - grantResult = activity.checkSelfPermission(permissions[i]); - - grantResults[i] = grantResult; - if (grantResult != PackageManager.PERMISSION_GRANTED) { - permissionsGranted = false; - } - } - - // Obviously, if the requested permissions have already been granted, - // there is nothing to ask the user about. 
On the other hand, if there - // is no Activity or the runtime permissions are not supported, there is - // no way to ask the user to grant us the denied permissions. - int requestCode = ++PermissionUtils.requestCode; - - if (permissionsGranted - // Here we test for the target SDK version with which *the app* - // was compiled. If we use Build.VERSION.SDK_INT that would give - // us the API version of the device itself, not the version the - // app was compiled for. When compiled for API level < 23 we - // must still use old permissions model, regardless of the - // Android version on the device. - || Build.VERSION.SDK_INT < Build.VERSION_CODES.M - || activity.getApplicationInfo().targetSdkVersion - < Build.VERSION_CODES.M) { - send(resultReceiver, requestCode, permissions, grantResults); - return; - } - - Bundle args = new Bundle(); - args.putInt(REQUEST_CODE, requestCode); - args.putParcelable(RESULT_RECEIVER, resultReceiver); - args.putStringArray(PERMISSIONS, permissions); - - RequestPermissionsFragment fragment = new RequestPermissionsFragment(); - fragment.setArguments(args); + /** + * Constants for internal fields in the Bundle exchanged between the activity requesting + * the permissions and the auxiliary activity we spawn for this purpose. + */ + private static final String GRANT_RESULTS = "GRANT_RESULT"; + + private static final String PERMISSIONS = "PERMISSION"; + private static final String REQUEST_CODE = "REQUEST_CODE"; + private static final String RESULT_RECEIVER = "RESULT_RECEIVER"; + + /** Incrementing counter for permission requests. Each request must have a unique numeric code. */ + private static int requestCode; + + private static void requestPermissions( + Activity activity, String[] permissions, ResultReceiver resultReceiver) { + // Ask the Context whether we have already been granted the requested + // permissions. 
+ int size = permissions.length; + int[] grantResults = new int[size]; + boolean permissionsGranted = true; + + for (int i = 0; i < size; ++i) { + int grantResult; + // No need to ask for permission on pre-Marshmallow + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) + grantResult = PackageManager.PERMISSION_GRANTED; + else grantResult = activity.checkSelfPermission(permissions[i]); + + grantResults[i] = grantResult; + if (grantResult != PackageManager.PERMISSION_GRANTED) { + permissionsGranted = false; + } + } - FragmentTransaction transaction - = activity.getFragmentManager().beginTransaction().add( - fragment, - fragment.getClass().getName() + "-" + requestCode); + // Obviously, if the requested permissions have already been granted, + // there is nothing to ask the user about. On the other hand, if there + // is no Activity or the runtime permissions are not supported, there is + // no way to ask the user to grant us the denied permissions. + int requestCode = ++PermissionUtils.requestCode; + + if (permissionsGranted + // Here we test for the target SDK version with which *the app* + // was compiled. If we use Build.VERSION.SDK_INT that would give + // us the API version of the device itself, not the version the + // app was compiled for. When compiled for API level < 23 we + // must still use old permissions model, regardless of the + // Android version on the device. + || Build.VERSION.SDK_INT < Build.VERSION_CODES.M + || activity.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.M) { + send(resultReceiver, requestCode, permissions, grantResults); + return; + } - try { - transaction.commit(); - } catch (IllegalStateException ise) { - // Context is a Plugin, just send result back. 
- send(resultReceiver, requestCode, permissions, grantResults); - } + Bundle args = new Bundle(); + args.putInt(REQUEST_CODE, requestCode); + args.putParcelable(RESULT_RECEIVER, resultReceiver); + args.putStringArray(PERMISSIONS, permissions); + + RequestPermissionsFragment fragment = new RequestPermissionsFragment(); + fragment.setArguments(args); + + FragmentTransaction transaction = + activity + .getFragmentManager() + .beginTransaction() + .add(fragment, fragment.getClass().getName() + "-" + requestCode); + + try { + transaction.commit(); + } catch (IllegalStateException ise) { + // Context is a Plugin, just send result back. + send(resultReceiver, requestCode, permissions, grantResults); } + } + + public static void requestPermissions( + final Activity activity, final String[] permissions, final Callback callback) { + requestPermissions( + activity, + permissions, + new ResultReceiver(new Handler(Looper.getMainLooper())) { + @Override + protected void onReceiveResult(int resultCode, Bundle resultData) { + callback.invoke( + resultData.getStringArray(PERMISSIONS), resultData.getIntArray(GRANT_RESULTS)); + } + }); + } + + private static void send( + ResultReceiver resultReceiver, int requestCode, String[] permissions, int[] grantResults) { + Bundle resultData = new Bundle(); + resultData.putStringArray(PERMISSIONS, permissions); + resultData.putIntArray(GRANT_RESULTS, grantResults); + + resultReceiver.send(requestCode, resultData); + } + + public interface Callback { + void invoke(String[] permissions, int[] grantResults); + } + + /** + * Helper activity for requesting permissions. Android only allows requesting permissions from an + * activity and the result is reported in the onRequestPermissionsResult method. Since + * this package is a library we create an auxiliary activity and communicate back the results + * using a ResultReceiver. 
+ */ + @RequiresApi(api = VERSION_CODES.M) + public static class RequestPermissionsFragment extends Fragment { + private void checkSelfPermissions(boolean requestPermissions) { + // Figure out which of the requested permissions are actually denied + // because we do not want to ask about the granted permissions + // (which Android supports). + Bundle args = getArguments(); + String[] permissions = args.getStringArray(PERMISSIONS); + int size = permissions.length; + Activity activity = getActivity(); + int[] grantResults = new int[size]; + ArrayList deniedPermissions = new ArrayList<>(); + + for (int i = 0; i < size; ++i) { + String permission = permissions[i]; + int grantResult; + // No need to ask for permission on pre-Marshmallow + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) + grantResult = PackageManager.PERMISSION_GRANTED; + else grantResult = activity.checkSelfPermission(permission); + + grantResults[i] = grantResult; + if (grantResult != PackageManager.PERMISSION_GRANTED) { + deniedPermissions.add(permission); + } + } - public static void requestPermissions( - final Activity activity, - final String[] permissions, - final Callback callback) { + int requestCode = args.getInt(REQUEST_CODE, 0); + + if (deniedPermissions.isEmpty() || !requestPermissions) { + // All permissions have already been granted or we cannot ask + // the user about the denied ones. + finish(); + send(args.getParcelable(RESULT_RECEIVER), requestCode, permissions, grantResults); + } else { + // Ask the user about the denied permissions. 
requestPermissions( - activity, - permissions, - new ResultReceiver(new Handler(Looper.getMainLooper())) { - @Override - protected void onReceiveResult( - int resultCode, - Bundle resultData) { - callback.invoke( - resultData.getStringArray(PERMISSIONS), - resultData.getIntArray(GRANT_RESULTS)); - } - }); + deniedPermissions.toArray(new String[deniedPermissions.size()]), requestCode); + } } - private static void send( - ResultReceiver resultReceiver, - int requestCode, - String[] permissions, - int[] grantResults) { - Bundle resultData = new Bundle(); - resultData.putStringArray(PERMISSIONS, permissions); - resultData.putIntArray(GRANT_RESULTS, grantResults); + private void finish() { + Activity activity = getActivity(); - resultReceiver.send(requestCode, resultData); + if (activity != null) { + activity.getFragmentManager().beginTransaction().remove(this).commitAllowingStateLoss(); + } } - public interface Callback { - void invoke(String[] permissions, int[] grantResults); + @Override + public void onRequestPermissionsResult( + int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { + Bundle args = getArguments(); + + if (args.getInt(REQUEST_CODE, 0) != requestCode) { + return; + } + + // XXX The super's documentation says: It is possible that the + // permissions request interaction with the user is interrupted. In + // this case you will receive empty permissions and results arrays + // which should be treated as a cancellation. + if (permissions.length == 0 || grantResults.length == 0) { + // The getUserMedia algorithm does not define a way to cancel + // the invocation so we have to redo the permission request. + finish(); + PermissionUtils.requestPermissions( + getActivity(), + args.getStringArray(PERMISSIONS), + (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); + } else { + // We did not ask for all requested permissions, just the denied + // ones. 
But when we send the result, we have to answer about + // all requested permissions. + checkSelfPermissions(/* requestPermissions */ false); + } } - /** - * Helper activity for requesting permissions. Android only allows - * requesting permissions from an activity and the result is reported in the - * onRequestPermissionsResult method. Since this package is a - * library we create an auxiliary activity and communicate back the results - * using a ResultReceiver. - */ - public static class RequestPermissionsFragment extends Fragment { -// @RequiresApi(api = VERSION_CODES.M) - private void checkSelfPermissions(boolean requestPermissions) { - // Figure out which of the requested permissions are actually denied - // because we do not want to ask about the granted permissions - // (which Android supports). - Bundle args = getArguments(); - String[] permissions = args.getStringArray(PERMISSIONS); - int size = permissions.length; - Activity activity = getActivity(); - int[] grantResults = new int[size]; - ArrayList deniedPermissions = new ArrayList<>(); - - for (int i = 0; i < size; ++i) { - String permission = permissions[i]; - int grantResult; - // No need to ask for permission on pre-Marshmallow - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) - grantResult = PackageManager.PERMISSION_GRANTED; - else - grantResult = activity.checkSelfPermission(permission); - - grantResults[i] = grantResult; - if (grantResult != PackageManager.PERMISSION_GRANTED) { - deniedPermissions.add(permission); - } - } - - int requestCode = args.getInt(REQUEST_CODE, 0); - - if (deniedPermissions.isEmpty() || !requestPermissions) { - // All permissions have already been granted or we cannot ask - // the user about the denied ones. - finish(); - send( - args.getParcelable(RESULT_RECEIVER), - requestCode, - permissions, - grantResults); - } else { - // Ask the user about the denied permissions. 
- requestPermissions( - deniedPermissions.toArray( - new String[deniedPermissions.size()]), - requestCode); - } - } - - private void finish() { - Activity activity = getActivity(); - - if (activity != null) { - activity.getFragmentManager().beginTransaction() - .remove(this) - .commitAllowingStateLoss(); - } - } - - @RequiresApi(api = VERSION_CODES.M) - @Override - public void onRequestPermissionsResult( - int requestCode, - String[] permissions, - int[] grantResults) { - Bundle args = getArguments(); - - if (args.getInt(REQUEST_CODE, 0) != requestCode) { - return; - } - - // XXX The super's documentation says: It is possible that the - // permissions request interaction with the user is interrupted. In - // this case you will receive empty permissions and results arrays - // which should be treated as a cancellation. - if (permissions.length == 0 || grantResults.length == 0) { - // The getUserMedia algorithm does not define a way to cancel - // the invocation so we have to redo the permission request. - finish(); - PermissionUtils.requestPermissions( - getActivity(), - args.getStringArray(PERMISSIONS), - (ResultReceiver) args.getParcelable(RESULT_RECEIVER)); - } else { - // We did not ask for all requested permissions, just the denied - // ones. But when we send the result, we have to answer about - // all requested permissions. 
- checkSelfPermissions(/* requestPermissions */ false); - } - } - - @Override - public void onResume() { - super.onResume(); + @Override + public void onResume() { + super.onResume(); - if (VERSION.SDK_INT >= VERSION_CODES.M) { - checkSelfPermissions(/* requestPermissions */ true); - } - } + checkSelfPermissions(/* requestPermissions */ true); } + } } From fb62b4074f24c2bf4b6080b7c54d8b8136cbca4d Mon Sep 17 00:00:00 2001 From: Sebastian Roth Date: Sat, 13 Jun 2020 17:13:40 +0100 Subject: [PATCH 6/6] Bump webrtc dependency as well --- android/build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index dbd4233cff..b30a3d8f14 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -41,6 +41,6 @@ android { } dependencies { - api 'org.webrtc:google-webrtc:1.0.28262' - implementation "androidx.annotation:annotation:1.0.1" + api 'org.webrtc:google-webrtc:1.0.30039' + implementation "androidx.annotation:annotation:1.1.0" }