diff --git a/README.md b/README.md index 09bb76ab04..6be3b4f784 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ WebRTC plugin for Flutter Mobile/Desktop/Web | Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Screen Capture | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | | | -| Unified-Plan | | | | | | | | +| Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | | | | MediaRecorder| :warning: | :warning: | :heavy_check_mark: | | | | | ## Usage diff --git a/android/build.gradle b/android/build.gradle index b30a3d8f14..411a9bdda4 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -25,7 +25,7 @@ android { compileSdkVersion 28 defaultConfig { - minSdkVersion 18 + minSdkVersion 21 testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" consumerProguardFiles 'proguard-rules.pro' } @@ -41,6 +41,6 @@ android { } dependencies { - api 'org.webrtc:google-webrtc:1.0.30039' + api 'org.webrtc:google-webrtc:1.0.32006' implementation "androidx.annotation:annotation:1.1.0" } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java index ca8df79a4b..e593e557fe 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java @@ -170,9 +170,10 @@ private void setVideoTrack(VideoTrack videoTrack) { this.videoTrack = videoTrack; if (videoTrack != null) { + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to " + videoTrack.id()); tryAddRendererToVideoTrack(); } else { - Log.w(TAG, "VideoTrack is null"); + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to null"); } } } diff --git 
a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index 8d94262ec5..121c92dd0b 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -211,6 +211,12 @@ public void onResume() { this.applicationContext = applicationContext; } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + /** * Includes default constraints set for the audio media type. * @@ -396,7 +402,7 @@ void getUserMedia( // requestedMediaTypes is the empty set, the method invocation fails // with a TypeError. if (requestPermissions.isEmpty()) { - result.error("TypeError", "constraints requests no media types", null); + resultError("getUserMedia", "TypeError, constraints requests no media types", result); return; } @@ -423,7 +429,7 @@ public void invoke(Object... args) { // getUserMedia() algorithm, if the user has denied // permission, fail "with a new DOMException object whose // name attribute has the value NotAllowedError." 
- result.error("DOMException", "NotAllowedError", null); + resultError("getUserMedia", "DOMException, NotAllowedError", result); } }); } @@ -453,7 +459,7 @@ protected void onReceiveResult(int requestCode, Bundle resultData) { Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); + resultError("screenRequestPremissions", "User didn't give permission to capture the screen.", result); return; } @@ -465,13 +471,11 @@ protected void onReceiveResult(int requestCode, Bundle resultData) { new MediaProjection.Callback() { @Override public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); + resultError("MediaProjection.Callback()", "User revoked permission to capture the screen.", result); } }); if (videoCapturer == null) { - result.error( - /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); + resultError("screenRequestPremissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result); return; } @@ -573,7 +577,7 @@ private void getUserMedia( // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. 
- result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); + resultError("getUserMedia", "Failed to create new track.", result); return; } @@ -753,7 +757,7 @@ private void requestPermissions( void switchCamera(String id, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(id); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + id, null); + resultError("switchCamera", "Video capturer not found for id: " + id, result); return; } @@ -767,7 +771,7 @@ public void onCameraSwitchDone(boolean b) { @Override public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); + resultError("switchCamera", "Switching camera failed: " + id, result); } }); } @@ -818,7 +822,7 @@ void stopRecording(Integer id) { void hasTorch(String trackId, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(trackId); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); + resultError("hasTorch", "Video capturer not found for id: " + trackId, result); return; } @@ -837,8 +841,7 @@ void hasTorch(String trackId, Result result) { (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -866,8 +869,7 @@ void hasTorch(String trackId, Result result) { camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className 
+ "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -879,15 +881,14 @@ void hasTorch(String trackId, Result result) { return; } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); } @RequiresApi(api = VERSION_CODES.LOLLIPOP) void setTorch(String trackId, boolean torch, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(trackId); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); + resultError("setTorch", "Video capturer not found for id: " + trackId, result); return; } @@ -919,8 +920,7 @@ void setTorch(String trackId, boolean torch, Result result) { (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -959,8 +959,7 @@ void setTorch(String trackId, boolean torch, Result result) { camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ 
-972,9 +971,7 @@ void setTorch(String trackId, boolean torch, Result result) { result.success(null); return; } - - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + resultError("setTorch", "[TORCH] Video capturer not compatible", result); } private Object getPrivateProperty(Class klass, Object object, String fieldName) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 21f8c91975..f05a50c05b 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -121,6 +121,12 @@ interface AudioManager { this.audioManager = audioManager; } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + void dispose() { mPeerConnectionObservers.clear(); } @@ -275,10 +281,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } result.success("success"); } else { - Log.d(TAG, "dtmf() peerConnection is null"); - result - .error("dtmf", "sendDtmf() peerConnection is null", - null); + resultError("dtmf", "peerConnection is null", result); } break; } @@ -314,8 +317,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String data = call.argument("data"); byteBuffer = ByteBuffer.wrap(data.getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { - Log.d(TAG, "Could not encode text string as UTF-8."); - result.error("dataChannelSendFailed", "Could not encode text string as UTF-8.", null); + resultError("dataChannelSend", "Could not encode text string as UTF-8.", result); return; } } @@ -400,8 +402,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { int textureId = call.argument("textureId"); 
FlutterRTCVideoRenderer render = renders.get(textureId); if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererDispose", "render [" + textureId + "] not found !", result); return; } render.Dispose(); @@ -412,16 +413,18 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "videoRendererSetSrcObject": { int textureId = call.argument("textureId"); String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("ownerTag"); + String ownerTag = call.argument("ownerTag"); FlutterRTCVideoRenderer render = renders.get(textureId); - if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererSetSrcObject", "render [" + textureId + "] not found !", result); return; } - - MediaStream stream = getStreamForId(streamId, peerConnectionId); + MediaStream stream = null; + if (ownerTag.equals("local")) { + stream = localStreams.get(streamId); + } else { + stream = getStreamForId(streamId, ownerTag); + } render.setStream(stream); result.success(null); break; @@ -487,10 +490,10 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); result.success(null); } else { - result.error("0", "No tracks", null); + resultError("startRecordToFile", "No tracks", result); } } catch (Exception e) { - result.error("-1", e.getMessage(), e); + resultError("startRecordToFile", e.getMessage(), result); } break; case "stopRecordToFile": @@ -506,10 +509,10 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { if (track instanceof VideoTrack) { new FrameCapturer((VideoTrack) track, new File(path), result); } else { - result.error(null, "It's not video track", null); + resultError("captureFrame", "It's not video track", result); } } else { - 
result.error(null, "Track is null", null); + resultError("captureFrame", "Track is null", result); } break; case "getLocalDescription": { @@ -522,9 +525,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { params.putString("type", sdp.type.canonicalForm()); result.success(params.toMap()); } else { - Log.d(TAG, "getLocalDescription() peerConnection is null"); - result.error("getLocalDescriptionFailed", "getLocalDescription() peerConnection is null", - null); + resultError("getLocalDescription", "peerConnection is nulll", result); } break; } @@ -538,10 +539,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { params.putString("type", sdp.type.canonicalForm()); result.success(params.toMap()); } else { - Log.d(TAG, "getRemoteDescription() peerConnection is null"); - result - .error("getRemoteDescriptionFailed", "getRemoteDescription() peerConnection is null", - null); + resultError("getRemoteDescription", "peerConnection is nulll", result); } break; } @@ -553,11 +551,96 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); result.success(null); } else { - Log.d(TAG, "setConfiguration() peerConnection is null"); - result.error("setConfigurationFailed", "setConfiguration() peerConnection is null", null); + resultError("setConfiguration", "peerConnection is nulll", result); } break; } + case "createSender": { + String peerConnectionId = call.argument("peerConnectionId"); + String kind = call.argument("kind"); + String streamId = call.argument("streamId"); + createSender(peerConnectionId, kind, streamId, result); + break; + } + case "closeSender": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + stopSender(peerConnectionId, senderId, result); + break; + } + case "addTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String 
trackId = call.argument("trackId"); + List streamIds = call.argument("streamIds"); + addTrack(peerConnectionId, trackId, streamIds, result); + break; + } + case "removeTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + removeTrack(peerConnectionId, senderId, result); + break; + } + case "addTransceiver": { + String peerConnectionId = call.argument("peerConnectionId"); + Map transceiverInit = call.argument("transceiverInit"); + if(call.hasArgument("trackId")) { + String trackId = call.argument("trackId"); + addTransceiver(peerConnectionId, trackId, transceiverInit, result); + } else if(call.hasArgument("mediaType")) { + String mediaType = call.argument("mediaType"); + addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + } else { + resultError("addTransceiver", "Incomplete parameters", result); + } + break; + } + case "rtpTransceiverSetDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String direction = call.argument("direction"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverSetDirection(peerConnectionId, direction, transceiverId, result); + break; + } + case "rtpTransceiverGetCurrentDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverGetCurrentDirection(peerConnectionId, transceiverId, result); + break; + } + case "rtpTransceiverStop": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + rtpTransceiverStop(peerConnectionId, transceiverId, result); + break; + } + case "rtpSenderSetParameters": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + Map parameters = call.argument("parameters"); + rtpSenderSetParameters(peerConnectionId, rtpSenderId, parameters, result); + break; + } + case 
"rtpSenderReplaceTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, true, result); + break; + } + case "rtpSenderSetTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, false, result); + break; + } + case "rtpSenderDispose": { + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + rtpSenderDispose(peerConnectionId, rtpSenderId, result); + break; + } default: result.notImplemented(); break; @@ -891,23 +974,23 @@ public Activity getActivity() { } MediaStream getStreamForId(String id, String peerConnectionId) { - MediaStream stream = localStreams.get(id); - - if (stream == null) { - if (peerConnectionId.length() > 0) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + MediaStream stream = null; + if (peerConnectionId.length() > 0) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + stream = pco.remoteStreams.get(id); + } else { + for (Entry entry : mPeerConnectionObservers + .entrySet()) { + PeerConnectionObserver pco = entry.getValue(); stream = pco.remoteStreams.get(id); - } else { - for (Entry entry : mPeerConnectionObservers - .entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - stream = pco.remoteStreams.get(id); - if (stream != null) { - break; - } + if (stream != null) { + break; } } } + if (stream == null) { + stream = localStreams.get(id); + } return stream; } @@ -938,9 +1021,7 @@ public void getUserMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to 
distinguishing the various causes of failure. - result.error( - /* type */ "getUserMediaFailed", - "Failed to create new media stream", null); + resultError("getUserMediaFailed", "Failed to create new media stream", result); return; } @@ -956,9 +1037,7 @@ public void getDisplayMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "getDisplayMedia", - "Failed to create new media stream", null); + resultError("getDisplayMedia", "Failed to create new media stream", result); return; } @@ -995,7 +1074,7 @@ private void createLocalMediaStream(Result result) { localStreams.put(streamId, mediaStream); if (mediaStream == null) { - result.error(/* type */ "createLocalMediaStream", "Failed to create new media stream", null); + resultError("createLocalMediaStream", "Failed to create new media stream", result); return; } Map resultMap = new HashMap<>(); @@ -1056,14 +1135,10 @@ public void mediaStreamAddTrack(final String streaemId, final String trackId, Re mediaStream.addTrack((VideoTrack) track); } } else { - String errorMsg = "mediaStreamAddTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamAddTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() stream [" + trackId + "] is null", result); } result.success(null); } @@ -1079,14 +1154,10 @@ public void mediaStreamRemoveTrack(final String streaemId, final String trackId, mediaStream.removeTrack((VideoTrack) track); } } else { - String errorMsg = "mediaStreamRemoveTrack() track [" + trackId + "] is null"; -
Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamRemoveTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() stream [" + trackId + "] is null", result); } result.success(null); } @@ -1161,9 +1232,7 @@ public void peerConnectionAddStream(final String streamId, final String id, Resu Log.d(TAG, "addStream" + result); result.success(res); } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); - result.error("peerConnectionAddStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionAddStream", "peerConnection is null", result); } } @@ -1178,9 +1247,7 @@ public void peerConnectionRemoveStream(final String streamId, final String id, R peerConnection.removeStream(mediaStream); result.success(null); } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); - result.error("peerConnectionRemoveStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionRemoveStream", "peerConnection is null", result); } } @@ -1194,8 +1261,8 @@ public void peerConnectionCreateOffer( peerConnection.createOffer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); - } + resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR: " + s, result); + } @Override public void onCreateSuccess(final SessionDescription sdp) { @@ -1214,8 +1281,7 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); - result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); +
resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR", result); } } @@ -1229,7 +1295,7 @@ public void peerConnectionCreateAnswer( peerConnection.createAnswer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); + resultError("peerConnectionCreateAnswer", "WEBRTC_CREATE_ANSWER_ERROR: " + s, result); } @Override @@ -1249,16 +1315,13 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); - result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); + resultError("peerConnectionCreateAnswer", "peerConnection is null", result); } } public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, final Result result) { PeerConnection peerConnection = getPeerConnection(id); - - Log.d(TAG, "peerConnectionSetLocalDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( Type.fromCanonicalForm(sdpMap.getString("type")), @@ -1281,22 +1344,17 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); + resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetLocalDescription() end"); } public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, final Result result) { PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); - - Log.d(TAG, 
"peerConnectionSetRemoteDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( Type.fromCanonicalForm(sdpMap.getString("type")), @@ -1319,21 +1377,18 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetRemoteDescription() end"); } public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, final Result result) { boolean res = false; PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "peerConnectionAddICECandidate() start"); if (peerConnection != null) { IceCandidate candidate = new IceCandidate( candidateMap.getString("sdpMid"), @@ -1342,18 +1397,15 @@ public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final Str ); res = peerConnection.addIceCandidate(candidate); } else { - Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); - result.error("peerConnectionAddICECandidateFailed", - "peerConnectionAddICECandidate() peerConnection is null", null); + resultError("peerConnectionAddICECandidate", "peerConnection is null", result); } result.success(res); - Log.d(TAG, "peerConnectionAddICECandidate() end"); } public void peerConnectionGetStats(String trackId, String id, final Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); + 
resultError("peerConnectionGetStats", "peerConnection is null", result); } else { pco.getStats(trackId, result); } @@ -1438,4 +1490,129 @@ public void dataChannelClose(String peerConnectionId, int dataChannelId) { public void setActivity(Activity activity) { this.activity = activity; } + + public void createSender(String peerConnectionId, String kind, String streamId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("createSender", "peerConnection is null", result); + } else { + pco.createSender(kind, streamId, result); + } + } + + public void stopSender(String peerConnectionId, String senderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("stopSender", "peerConnection is null", result); + } else { + pco.closeSender(senderId, result); + } + } + + public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result){ + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + resultError("addTrack", "track is null", result); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + resultError("addTrack", "peerConnection is null", result); + } else { + pco.addTrack(track, streamIds, result); + } + } + + public void removeTrack(String peerConnectionId, String senderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("removeTrack", "peerConnection is null", result); + } else { + pco.removeTrack(senderId, result); + } + } + + public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = 
mPeerConnectionObservers.get(peerConnectionId); + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + resultError("addTransceiver", "track is null", result); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + resultError("addTransceiver", "peerConnection is null", result); + } else { + pco.addTransceiver(track, transceiverInit, result); + } + } + + public void addTransceiverOfType(String peerConnectionId, String mediaType, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("addTransceiverOfType", "peerConnection is null", result); + } else { + pco.addTransceiverOfType(mediaType, transceiverInit, result); + } + } + + public void rtpTransceiverSetDirection(String peerConnectionId, String direction, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverSetDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverSetDirection(direction, transceiverId, result); + } + } + + public void rtpTransceiverGetCurrentDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverGetCurrentDirection", "peerConnection is null", result); + } else { + pco.rtpTransceiverGetCurrentDirection(transceiverId, result); + } + } + + public void rtpTransceiverStop(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpTransceiverStop", "peerConnection is null", result); + } else { + pco.rtpTransceiverStop(transceiverId,
result); + } + } + + public void rtpSenderSetParameters(String peerConnectionId, String rtpSenderId, Map parameters, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetParameters", "peerConnection is null", result); + } else { + pco.rtpSenderSetParameters(rtpSenderId, parameters, result); + } + } + + public void rtpSenderDispose(String peerConnectionId, String rtpSenderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderDispose", "peerConnection is null", result); + } else { + pco.rtpSenderDispose(rtpSenderId, result); + } + } + + public void rtpSenderSetTrack(String peerConnectionId, String rtpSenderId, String trackId, boolean replace, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetTrack", "peerConnection is null", result); + } else { + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + resultError("rtpSenderSetTrack", "track is null", result); + return; + } + pco.rtpSenderSetTrack(rtpSenderId, track, result, replace); + } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 532718ac7b..cd03ae0b45 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -9,25 +9,31 @@ import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel.Result; +import java.lang.reflect.Field; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.HashMap; 
import java.util.Iterator; import java.util.Map; +import java.util.List; import org.webrtc.AudioTrack; +import org.webrtc.CandidatePairChangeEvent; import org.webrtc.DataChannel; +import org.webrtc.DtmfSender; import org.webrtc.IceCandidate; import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnection; +import org.webrtc.RtpParameters; import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; import org.webrtc.StatsObserver; import org.webrtc.StatsReport; import org.webrtc.VideoTrack; class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.StreamHandler { - private final static String TAG = FlutterWebRTCPlugin.TAG; - private final SparseArray dataChannels = new SparseArray<>(); private BinaryMessenger messenger; private final String id; @@ -35,7 +41,6 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St final Map remoteStreams = new HashMap<>(); final Map remoteTracks = new HashMap<>(); private final StateProvider stateProvider; - private final EventChannel eventChannel; private EventChannel.EventSink eventSink; @@ -48,6 +53,12 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St eventChannel.setStreamHandler(this); } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + @Override public void onListen(Object o, EventChannel.EventSink sink) { eventSink = new AnyThreadSink(sink); @@ -75,7 +86,6 @@ void close() { void dispose() { this.close(); - peerConnection.dispose(); eventChannel.setStreamHandler(null); } @@ -109,39 +119,67 @@ void createDataChannel(String label, ConstraintsMap config, Result result) { // breakages). 
int dataChannelId = init.id; if (dataChannel != null && -1 != dataChannelId) { - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); + dataChannels.put(dataChannelId, dataChannel); + registerDataChannelObserver(dataChannelId, dataChannel); - ConstraintsMap params = new ConstraintsMap(); - params.putInt("id", dataChannel.id()); - params.putString("label", dataChannel.label()); - result.success(params.toMap()); + ConstraintsMap params = new ConstraintsMap(); + params.putInt("id", dataChannel.id()); + params.putString("label", dataChannel.label()); + result.success(params.toMap()); } else { - result.error("createDataChannel", - "Can't create data-channel for id: " + dataChannelId, - null); + resultError("createDataChannel", "Can't create data-channel for id: " + dataChannelId, result); } } - void dataChannelClose(int dataChannelId) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - dataChannel.close(); - dataChannels.remove(dataChannelId); - } else { - Log.d(TAG, "dataChannelClose() dataChannel is null"); + void dataChannelClose(int dataChannelId) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + dataChannel.close(); + dataChannels.remove(dataChannelId); + } else { + Log.d(TAG, "dataChannelClose() dataChannel is null"); + } } - } - void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); - dataChannel.send(buffer); - } else { - Log.d(TAG, "dataChannelSend() dataChannel is null"); + void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); + 
dataChannel.send(buffer); + } else { + Log.d(TAG, "dataChannelSend() dataChannel is null"); + } + } + + RtpTransceiver getRtpTransceiverById(String id) { + List transceivers = peerConnection.getTransceivers(); + for(RtpTransceiver transceiver : transceivers) { + if (id == transceiver.getMid()){ + return transceiver; + } + } + return null; + } + + RtpSender getRtpSenderById(String id) { + List senders = peerConnection.getSenders(); + for(RtpSender sender : senders) { + if (id == sender.id()){ + return sender; + } + } + return null; + } + + RtpReceiver getRtpReceiverById(String id) { + List receivers = peerConnection.getReceivers(); + for(RtpReceiver receiver : receivers) { + if (id == receiver.id()){ + return receiver; + } + } + return null; } - } void getStats(String trackId, final Result result) { MediaStreamTrack track = null; @@ -184,10 +222,7 @@ public void onComplete(StatsReport[] reports) { }, track); } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - result.error("peerConnectionGetStats", - "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, - null); + resultError("peerConnectionGetStats","MediaStreamTrack not found for id: " + trackId, result); } } @@ -196,14 +231,24 @@ public void onIceCandidate(final IceCandidate candidate) { Log.d(TAG, "onIceCandidate"); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onCandidate"); - ConstraintsMap candidateParams = new ConstraintsMap(); - candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); - candidateParams.putString("sdpMid", candidate.sdpMid); - candidateParams.putString("candidate", candidate.sdp); - params.putMap("candidate", candidateParams.toMap()); + params.putMap("candidate", candidateToMap(candidate)); sendEvent(params); } + @Override + public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { + Log.d(TAG, "onSelectedCandidatePairChanged"); + ConstraintsMap params = new 
ConstraintsMap(); + params.putString("event", "onSelectedCandidatePairChanged"); + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("lastDataReceivedMs", event.lastDataReceivedMs); + candidateParams.putMap("local", candidateToMap(event.local)); + candidateParams.putMap("remote", candidateToMap(event.remote)); + candidateParams.putString("reason", event.reason); + params.putMap("candidate", candidateParams.toMap()); + sendEvent(params); + } + @Override public void onIceCandidatesRemoved(final IceCandidate[] candidates) { Log.d(TAG, "onIceCandidatesRemoved"); @@ -267,6 +312,7 @@ public void onAddStream(MediaStream mediaStream) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onAddStream"); params.putString("streamId", streamId); + params.putString("ownerTag", id); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); @@ -333,28 +379,57 @@ public void onRemoveStream(MediaStream mediaStream) { sendEvent(params); } + @Override + public void onTrack(RtpTransceiver transceiver) { + /* + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onTrack"); + params.putMap("transceiver", transceiverToMap(transceiver)); + params.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + params.putMap("track", mediaTrackToMap(transceiver.getReceiver().track())); + params.putArray("streams", new ConstraintsArray().toArrayList()); + sendEvent(params); + */ + } + @Override public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { - Log.d(TAG, "onAddTrack"); - for (MediaStream stream : mediaStreams) { - String streamId = stream.getId(); - MediaStreamTrack track = receiver.track(); + Log.d(TAG, "onAddTrack"); + // for plan-b + for (MediaStream stream : mediaStreams) { + String streamId = stream.getId(); + MediaStreamTrack track = receiver.track(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", 
"onAddTrack"); + params.putString("streamId", streamId); + params.putString("ownerTag", id); + params.putString("trackId", track.id()); + + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); + } + + // For unified-plan ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddTrack"); - params.putString("streamId", streamId); - params.putString("trackId", track.id()); + ConstraintsArray streams = new ConstraintsArray(); + for(int i = 0; i< mediaStreams.length; i++){ + MediaStream stream = mediaStreams[i]; + streams.pushMap(new ConstraintsMap(mediaStreamToMap(stream))); + } - String trackId = track.id(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", track.kind()); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - params.putMap("track", trackInfo.toMap()); + params.putString("event", "onTrack"); + params.putArray("streams", streams.toArrayList()); + params.putMap("track", mediaTrackToMap(receiver.track())); + params.putMap("receiver", rtpReceiverToMap(receiver)); sendEvent(params); - } } @Override @@ -416,7 +491,16 @@ public void onSignalingChange(PeerConnection.SignalingState signalingState) { sendEvent(params); } - @Nullable + @Override + public void onConnectionChange(PeerConnection.PeerConnectionState connectionState) { + Log.d(TAG, "onConnectionChange" + connectionState.name()); + ConstraintsMap params = new ConstraintsMap(); + 
params.putString("event", "peerConnectionState"); + params.putString("state", connectionStateString(connectionState)); + sendEvent(params); + } + + @Nullable private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { switch (iceConnectionState) { case NEW: @@ -468,4 +552,386 @@ private String signalingStateString(PeerConnection.SignalingState signalingState } return null; } + + @Nullable + private String connectionStateString(PeerConnection.PeerConnectionState connectionState) { + switch (connectionState) { + case NEW: + return "new"; + case CONNECTING: + return "connecting"; + case CONNECTED: + return "connected"; + case DISCONNECTED: + return "disconnected"; + case FAILED: + return "failed"; + case CLOSED: + return "closed"; + } + return null; + } + + @Nullable + private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection direction) { + switch (direction) { + case SEND_RECV: + return "sendrecv"; + case SEND_ONLY: + return "sendonly"; + case RECV_ONLY: + return "recvonly"; + case INACTIVE: + return "inactive"; + } + return null; + } + + private RtpTransceiver.RtpTransceiverDirection stringToTransceiverDirection(String direction) { + switch (direction) { + case "sendrecv": + return RtpTransceiver.RtpTransceiverDirection.SEND_RECV; + case "sendonly": + return RtpTransceiver.RtpTransceiverDirection.SEND_ONLY; + case "recvonly": + return RtpTransceiver.RtpTransceiverDirection.RECV_ONLY; + case "inactive": + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + + private MediaStreamTrack.MediaType stringToMediaType(String mediaType) { + MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if(mediaType.equals("audio")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + else if(mediaType.equals("video")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + return type; + } + + private RtpParameters.Encoding 
mapToEncoding(Map parameters) { + RtpParameters.Encoding encoding = new RtpParameters.Encoding((String)parameters.get("rid"), true, 1.0); + + if( parameters.get("active") != null) { + encoding.active = (Boolean) parameters.get("active"); + } + + if( parameters.get("ssrc") != null) { + encoding.ssrc = ((Integer) parameters.get("ssrc")).longValue(); + } + + if( parameters.get("minBitrateBps") != null) { + encoding.minBitrateBps = (Integer) parameters.get("minBitrateBps"); + } + + if( parameters.get("maxBitrateBps") != null) { + encoding.maxBitrateBps = (Integer) parameters.get("maxBitrateBps"); + } + + if( parameters.get("maxFramerate") != null) { + encoding.maxFramerate = (Integer) parameters.get("maxFramerate"); + } + + if( parameters.get("numTemporalLayers") != null) { + encoding.numTemporalLayers = (Integer) parameters.get("numTemporalLayers"); + } + + if( parameters.get("scaleResolutionDownBy") != null) { + encoding.scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); + } + + return encoding; + } + + private RtpTransceiver.RtpTransceiverInit mapToRtpTransceiverInit(Map parameters) { + List streamIds = (List)parameters.get("streamIds"); + List> encodingsParams = (List>)parameters.get("sendEncodings"); + String direction = (String)parameters.get("direction"); + List sendEncodings = new ArrayList<>(); + RtpTransceiver.RtpTransceiverInit init = null; + if(encodingsParams != null) { + for (int i=0;i< encodingsParams.size();i++){ + Map params = encodingsParams.get(i); + sendEncodings.add(mapToEncoding(params)); + } + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction) ,streamIds, sendEncodings); + } else { + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction) ,streamIds); + } + return init; + } + + private RtpParameters MapToRtpParameters(Map parameters) { + RtpParameters rtpParameters = null; + return rtpParameters; + } + + private Map rtpParametersToMap(RtpParameters 
rtpParameters){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("transactionId", rtpParameters.transactionId); + + ConstraintsMap rtcp = new ConstraintsMap(); + rtcp.putString("cname", rtpParameters.getRtcp().getCname()); + rtcp.putBoolean("reducedSize", rtpParameters.getRtcp().getReducedSize()); + info.putMap("rtcp", rtcp.toMap()); + + ConstraintsArray headerExtensions = new ConstraintsArray(); + for(RtpParameters.HeaderExtension extension : rtpParameters.getHeaderExtensions()){ + ConstraintsMap map = new ConstraintsMap(); + map.putString("uri",extension.getUri()); + map.putInt("id", extension.getId()); + map.putBoolean("encrypted", extension.getEncrypted()); + headerExtensions.pushMap(map); + } + info.putArray("headerExtensions", headerExtensions.toArrayList()); + + ConstraintsArray encodings = new ConstraintsArray(); + for(RtpParameters.Encoding encoding : rtpParameters.encodings){ + ConstraintsMap map = new ConstraintsMap(); + map.putBoolean("active",encoding.active); + if (encoding.maxBitrateBps != null) { + map.putInt("maxBitrateBps", encoding.maxBitrateBps); + } + if (encoding.minBitrateBps != null) { + map.putInt("minBitrateBps", encoding.minBitrateBps); + } + if (encoding.maxFramerate != null) { + map.putInt("maxFramerate", encoding.maxFramerate); + } + if (encoding.numTemporalLayers != null) { + map.putInt("numTemporalLayers", encoding.numTemporalLayers); + } + if (encoding.scaleResolutionDownBy != null) { + map.putDouble("scaleResolutionDownBy", encoding.scaleResolutionDownBy); + } + if (encoding.ssrc != null) { + map.putLong("ssrc", encoding.ssrc); + } + encodings.pushMap(map); + } + info.putArray("encodings", encodings.toArrayList()); + + ConstraintsArray codecs = new ConstraintsArray(); + for(RtpParameters.Codec codec : rtpParameters.codecs){ + ConstraintsMap map = new ConstraintsMap(); + map.putString("name",codec.name); + map.putInt("payloadType", codec.payloadType); + map.putInt("clockRate", codec.clockRate); + if (codec.numChannels 
!= null) { + map.putInt("numChannels", codec.numChannels); + } + map.putMap("parameters", new HashMap(codec.parameters)); + try { + Field field = codec.getClass().getDeclaredField("kind"); + field.setAccessible(true); + if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO)) { + map.putString("kind", "audio"); + } else if(field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO)) { + map.putString("kind", "video"); + } + } catch (NoSuchFieldException e1) { + e1.printStackTrace(); + } catch (IllegalArgumentException e1) { + e1.printStackTrace(); + } catch (IllegalAccessException e1) { + e1.printStackTrace(); + } + codecs.pushMap(map); + } + + info.putArray("codecs", codecs.toArrayList()); + return info.toMap(); + } + + @Nullable + private Map mediaStreamToMap(MediaStream stream){ + ConstraintsMap params = new ConstraintsMap(); + params.putString("streamId", stream.getId()); + params.putString("ownerTag", id); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + + for (MediaStreamTrack track : stream.audioTracks) { + audioTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } + + for (MediaStreamTrack track : stream.videoTracks) { + videoTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } + + params.putArray("audioTracks", audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); + return params.toMap(); + } + + @Nullable + private Map mediaTrackToMap(MediaStreamTrack track){ + ConstraintsMap info = new ConstraintsMap(); + if(track != null){ + info.putString("trackId", track.id()); + info.putString("label",track.getClass() == VideoTrack.class? 
"video": "audio"); + info.putString("kind",track.kind()); + info.putBoolean("enabled", track.enabled()); + info.putString("readyState", track.state().toString()); + } + return info.toMap(); + } + + private Map dtmfSenderToMap(DtmfSender dtmfSender, String id){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("dtmfSenderId",id); + if (dtmfSender != null) { + info.putInt("interToneGap", dtmfSender.interToneGap()); + info.putInt("duration", dtmfSender.duration()); + } + return info.toMap(); + } + + private Map rtpSenderToMap(RtpSender sender){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("senderId", sender.id()); + info.putBoolean("ownsTrack", true); + info.putMap("dtmfSender", dtmfSenderToMap(sender.dtmf(), sender.id())); + info.putMap("rtpParameters", rtpParametersToMap(sender.getParameters())); + info.putMap("track", mediaTrackToMap(sender.track())); + return info.toMap(); + } + + private Map rtpReceiverToMap(RtpReceiver receiver){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("receiverId", receiver.id()); + info.putMap("rtpParameters", rtpParametersToMap(receiver.getParameters())); + info.putMap("track", mediaTrackToMap(receiver.track())); + return info.toMap(); + } + + Map transceiverToMap(RtpTransceiver transceiver){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("transceiverId", transceiver.getMid()); + info.putString("mid", transceiver.getMid()); + info.putString("direction", transceiverDirectionString(transceiver.getDirection())); + info.putMap("sender", rtpSenderToMap(transceiver.getSender())); + info.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + return info.toMap(); + } + + Map candidateToMap(IceCandidate candidate) { + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); + candidateParams.putString("sdpMid", candidate.sdpMid); + candidateParams.putString("candidate", candidate.sdp); + return 
candidateParams.toMap(); + } + + public void createSender(String kind, String streamId, Result result){ + RtpSender sender = peerConnection.createSender(kind, streamId); + result.success(rtpSenderToMap(sender)); + } + + public void closeSender(String senderId, Result result) { + RtpSender sender = getRtpSenderById(senderId); + sender.dispose(); + Map params = new HashMap<>(); + params.put("result", true); + result.success(params); + } + + public void addTrack(MediaStreamTrack track, List streamIds, Result result){ + RtpSender sender = peerConnection.addTrack(track, streamIds); + result.success(rtpSenderToMap(sender)); + } + + public void removeTrack(String senderId, Result result){ + RtpSender sender = getRtpSenderById(senderId); + if(sender == null){ + resultError("removeTrack", "sender is null", result); + return; + } + boolean res = peerConnection.removeTrack(sender); + Map params = new HashMap<>(); + params.put("result", res); + result.success(params); + } + + public void addTransceiver(MediaStreamTrack track, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if(transceiverInit != null){ + transceiver = peerConnection.addTransceiver(track, mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(track); + } + result.success(transceiverToMap(transceiver)); + } + + public void addTransceiverOfType(String mediaType, Map transceiverInit, Result result) { + RtpTransceiver transceiver; + if(transceiverInit != null){ + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType), mapToRtpTransceiverInit(transceiverInit)); + } else { + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); + } + result.success(transceiverToMap(transceiver)); + } + + public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + 
resultError("rtpTransceiverSetDirection", "transceiver is null", result); + return; + } + transceiver.setDirection(stringToTransceiverDirection(direction)); + result.success(null); + } + + public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverGetCurrentDirection", "transceiver is null", result); + return; + } + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(transceiver.getDirection())); + result.success(params.toMap()); + } + + public void rtpTransceiverStop(String transceiverId, Result result) { + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverStop", "transceiver is null", result); + return; + } + transceiver.stop(); + result.success(null); + } + + public void rtpSenderSetParameters(String rtpSenderId, Map parameters, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetParameters", "sender is null", result); + return; + } + sender.setParameters(MapToRtpParameters(parameters)); + result.success(null); + } + + public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetTrack", "sender is null", result); + return; + } + sender.setTrack(track, replace ); + result.success(null); + } + + public void rtpSenderDispose(String rtpSenderId, Result result) { + RtpSender sender = getRtpSenderById(rtpSenderId); + if (sender == null) { + resultError("rtpSenderDispose", "sender is null", result); + return; + } + sender.dispose(); + result.success(null); + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java 
b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java index eb16e20697..08291dcc50 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java @@ -89,6 +89,10 @@ public void putInt(String key, int value) { mMap.put(key, value); } + public void putLong(String key, long value) { + mMap.put(key, value); + } + public void putString(String key, String value) { mMap.put(key, value); } diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.h b/common/darwin/Classes/FlutterRPScreenRecorder.h new file mode 100644 index 0000000000..8b3bec13c1 --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1,10 @@ +#import +#if TARGET_OS_IPHONE +@interface FlutterRPScreenRecorder : RTCVideoCapturer + +-(void)startCapture; + +-(void)stopCapture; + +@end +#endif diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.m b/common/darwin/Classes/FlutterRPScreenRecorder.m new file mode 100644 index 0000000000..fb421b7b52 --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1,74 @@ +#import "FlutterRPScreenRecorder.h" +#if TARGET_OS_IPHONE +#import + +//See: https://developer.apple.com/videos/play/wwdc2017/606/ + +@implementation FlutterRPScreenRecorder { + RPScreenRecorder *screenRecorder; + RTCVideoSource *source; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + source = delegate; + return [super initWithDelegate:delegate]; +} + +-(void)startCapture +{ + if(screenRecorder == NULL) + screenRecorder = [RPScreenRecorder sharedRecorder]; + + [screenRecorder setMicrophoneEnabled:NO]; + + if (![screenRecorder isAvailable]) { + NSLog(@"Screen recorder is not available!"); + return; + } + + [screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) { + if (bufferType == RPSampleBufferTypeVideo) {// We want video only now + [self 
handleSourceBuffer:sampleBuffer sampleType:bufferType]; + } + } completionHandler:^(NSError * _Nullable error) { + if (error != nil) + NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); + }]; +} + +-(void)stopCapture +{ + [screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) { + if (error != nil) + NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); + }]; +} + +-(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType +{ + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + + [source adaptOutputFormatToWidth:width/2 height:height/2 fps:8]; + + RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end +#endif diff --git a/common/darwin/Classes/FlutterRTCDataChannel.h b/common/darwin/Classes/FlutterRTCDataChannel.h new file mode 100755 index 0000000000..c2e039f072 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.h @@ -0,0 +1,28 @@ +#import "FlutterWebRTCPlugin.h" +#import + +@interface RTCDataChannel (Flutter) +@property (nonatomic, strong) NSString *peerConnectionId; +@property (nonatomic, strong) NSNumber *flutterChannelId; +@property (nonatomic, strong) FlutterEventSink eventSink; +@property (nonatomic, strong) FlutterEventChannel* eventChannel; +@end + +@interface 
FlutterWebRTCPlugin (RTCDataChannel) + + +-(void)createDataChannel:(nonnull NSString *)peerConnectionId + label:(nonnull NSString *)label + config:(nonnull RTCDataChannelConfiguration *)config + messenger:(NSObject*)messenger; + +-(void)dataChannelClose:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId; + + +-(void)dataChannelSend:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId + data:(nonnull NSString *)data + type:(nonnull NSString *)type; + +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.m b/common/darwin/Classes/FlutterRTCDataChannel.m new file mode 100755 index 0000000000..e408c53076 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.m @@ -0,0 +1,165 @@ +#import +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCPeerConnection.h" +#import + +@implementation RTCDataChannel (Flutter) + +- (NSString *)peerConnectionId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setPeerConnectionId:(NSString *)peerConnectionId +{ + objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink )eventSink +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink +{ + objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSNumber *)flutterChannelId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterChannelId:(NSNumber *)flutterChannelId +{ + objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel *)eventChannel +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel *)eventChannel +{ + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - 
FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (RTCDataChannel) + +-(void)createDataChannel:(nonnull NSString *)peerConnectionId + label:(NSString *)label + config:(RTCDataChannelConfiguration *)config + messenger:(NSObject*)messenger +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; + + if (nil != dataChannel) { + dataChannel.peerConnectionId = peerConnectionId; + NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; + peerConnection.dataChannels[dataChannelId] = dataChannel; + dataChannel.flutterChannelId = dataChannelId; + dataChannel.delegate = self; + + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] + binaryMessenger:messenger]; + + dataChannel.eventChannel = eventChannel; + [eventChannel setStreamHandler:dataChannel]; + } +} + +-(void)dataChannelClose:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSMutableDictionary *dataChannels = peerConnection.dataChannels; + RTCDataChannel *dataChannel = dataChannels[dataChannelId]; + FlutterEventChannel *eventChannel = dataChannel.eventChannel; + [eventChannel setStreamHandler:nil]; + dataChannel.eventChannel = nil; + [dataChannel close]; + [dataChannels removeObjectForKey:dataChannelId]; +} + +-(void)dataChannelSend:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId + 
data:(id)data + type:(NSString *)type +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; + + NSData *bytes = [type isEqualToString:@"binary"] ? + ((FlutterStandardTypedData*)data).data : + [data dataUsingEncoding:NSUTF8StringEncoding]; + + RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; + [dataChannel sendData:buffer]; +} + +- (NSString *)stringForDataChannelState:(RTCDataChannelState)state +{ + switch (state) { + case RTCDataChannelStateConnecting: return @"connecting"; + case RTCDataChannelStateOpen: return @"open"; + case RTCDataChannelStateClosing: return @"closing"; + case RTCDataChannelStateClosed: return @"closed"; + } + return nil; +} + +#pragma mark - RTCDataChannelDelegate methods + +// Called when the data channel state has changed. +- (void)dataChannelDidChangeState:(RTCDataChannel*)channel +{ + RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; + FlutterEventSink eventSink = channel.eventSink; + if(eventSink) { + eventSink(@{ @"event" : @"dataChannelStateChanged", + @"id": channel.flutterChannelId, + @"state": [self stringForDataChannelState:channel.readyState]}); + } +} + +// Called when a data buffer was successfully received. 
+- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer +{ + NSString *type; + id data; + if (buffer.isBinary) { + type = @"binary"; + data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; + } else { + type = @"text"; + data = [[NSString alloc] initWithData:buffer.data + encoding:NSUTF8StringEncoding]; + } + RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; + FlutterEventSink eventSink = channel.eventSink; + if(eventSink) { + eventSink(@{ @"event" : @"dataChannelReceiveMessage", + @"id": channel.flutterChannelId, + @"type": type, + @"data": (data ? data : [NSNull null])}); + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h new file mode 100644 index 0000000000..a3ae4f8ab0 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1,12 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif +#import + +@interface FlutterRTCFrameCapturer : NSObject + +- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m new file mode 100644 index 0000000000..d06444077b --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1,89 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif + + +#import "FlutterRTCFrameCapturer.h" + +#include "libyuv.h" + +@import CoreImage; +@import CoreVideo; + +@implementation FlutterRTCFrameCapturer { + RTCVideoTrack* _track; + NSString* _path; + FlutterResult _result; + bool _gotFrame; +} + +- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result +{ + self = [super init]; + if (self) { + _gotFrame = false; + _track = track; + _path = path; + _result = result; + [track addRenderer:self]; + } + return 
self; +} + +- (void)setSize:(CGSize)size +{ +} + +- (void)renderFrame:(nullable RTCVideoFrame *)frame +{ +#if TARGET_OS_IPHONE + if (_gotFrame || frame == nil) return; + _gotFrame = true; + + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; + CIContext *context = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [context createCGImage:ciImage + fromRect:CGRectMake(0, 0, frame.width, frame.height)]; + + UIImageOrientation orientation; + switch (frame.rotation) { + case RTCVideoRotation_90: + orientation = UIImageOrientationRight; + break; + case RTCVideoRotation_180: + orientation = UIImageOrientationDown; + break; + case RTCVideoRotation_270: + orientation = UIImageOrientationLeft; break; + default: + orientation = UIImageOrientationUp; + break; + } + + UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; + CGImageRelease(cgImage); + NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); + + if ([jpgData writeToFile:_path atomically:NO]) { + NSLog(@"File written successfully to %@", _path); + _result(nil); + } else { + NSLog(@"Failed to write to file"); + _result([FlutterError errorWithCode:@"CaptureFrameFailed" + message:@"Failed to write JPEG data to file" + details:nil]); + } + + dispatch_async(dispatch_get_main_queue(), ^{ + [self->_track removeRenderer:self]; + self->_track = nil; + }); +#endif +} + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h new file mode 100644 index 0000000000..12f1633cde --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.h @@ -0,0 +1,29 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (RTCMediaStream) + +-(void)getUserMedia:(NSDictionary *)constraints + result:(FlutterResult)result; + +-(void)getDisplayMedia:(NSDictionary *)constraints + 
result:(FlutterResult)result; + +-(void)createLocalMediaStream:(FlutterResult)result; + +-(void)getSources:(FlutterResult)result; + +-(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track + result:(FlutterResult) result; + +-(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track + torch:(BOOL) torch + result:(FlutterResult) result; + +-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track + result:(FlutterResult) result; + +-(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track + toPath:(NSString *) path + result:(FlutterResult) result; +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m new file mode 100755 index 0000000000..c48ae37b25 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -0,0 +1,617 @@ +#import + +#import + +#import "FlutterRTCFrameCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" + +#if TARGET_OS_IPHONE +#import "FlutterRPScreenRecorder.h" +#endif + +@implementation AVCaptureDevice (Flutter) + +- (NSString*)positionString { + switch (self.position) { + case AVCaptureDevicePositionUnspecified: return @"unspecified"; + case AVCaptureDevicePositionBack: return @"back"; + case AVCaptureDevicePositionFront: return @"front"; + } + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCMediaStream) + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} + */ +typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} + */ +typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); + +- (RTCMediaConstraints *)defaultMediaStreamConstraints { + NSDictionary *mandatoryConstraints + = @{ @"minWidth" : @"1280", + @"minHeight" : @"720", + @"minFrameRate" : @"30" }; + RTCMediaConstraints* constraints = + 
[[RTCMediaConstraints alloc] + initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; + return constraints; +} + +/** + * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the audio-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCAudioTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCAudioTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. + */ +- (void)getUserAudio:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + NSString *trackId = [[NSUUID UUID] UUIDString]; + RTCAudioTrack *audioTrack + = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; + + [mediaStream addAudioTrack:audioTrack]; + + successCallback(mediaStream); +} + +// TODO: Use RCTConvert for constraints ... +-(void)getUserMedia:(NSDictionary *)constraints + result:(FlutterResult) result { + // Initialize RTCMediaStream with a unique label in order to allow multiple + // RTCMediaStream instances initialized by multiple getUserMedia calls to be + // added to 1 RTCPeerConnection instance. As suggested by + // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good + // practice, use a UUID (conforming to RFC4122). 
+ NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream + = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + [self + getUserMedia:constraints + successCallback:^ (RTCMediaStream *mediaStream) { + NSString *mediaStreamId = mediaStream.streamId; + + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack *track in mediaStream.audioTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + for (RTCVideoTrack *track in mediaStream.videoTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); + } + errorCallback:^ (NSString *errorType, NSString *errorMessage) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] + message:errorMessage + details:nil]); + } + mediaStream:mediaStream]; +} + +/** + * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which + * satisfies specific constraints and adds it to a specific + * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track + * of the respective media type and the specified {@code constraints} specify + * that a track of the respective media type is required; otherwise, reports + * success for the specified {@code mediaStream} to a specific + * {@link NavigatorUserMediaSuccessCallback}. 
In other words, implements a media + * type-specific iteration of or successfully concludes the + * {@code getUserMedia()} algorithm. The method will be recursively invoked to + * conclude the whole {@code getUserMedia()} algorithm either with (successful) + * satisfaction of the specified {@code constraints} or with failure. + * + * @param constraints The {@code MediaStreamConstraints} which specifies the + * requested media types and which the new {@code RTCAudioTrack} or + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm. + */ +- (void)getUserMedia:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + // If mediaStream contains no audioTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local audio content. + if (mediaStream.audioTracks.count == 0) { + // constraints.audio + id audioConstraints = constraints[@"audio"]; + BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; + if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { + [self requestAccessForMediaType:AVMediaTypeAudio + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } + } + + // If mediaStream contains no videoTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local video content. 
+ if (mediaStream.videoTracks.count == 0) { + // constraints.video + id videoConstraints = constraints[@"video"]; + if (videoConstraints) { + BOOL requestAccessForVideo + = [videoConstraints isKindOfClass:[NSNumber class]] + ? [videoConstraints boolValue] + : [videoConstraints isKindOfClass:[NSDictionary class]]; +#if !TARGET_IPHONE_SIMULATOR + if (requestAccessForVideo) { + [self requestAccessForMediaType:AVMediaTypeVideo + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } +#endif + } + } + + // There are audioTracks and/or videoTracks in mediaStream as requested by + // constraints so the getUserMedia() is to conclude with success. + successCallback(mediaStream); +} + +/** + * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the video-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCVideoTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. 
+ */ +- (void)getUserVideo:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + id videoConstraints = constraints[@"video"]; + AVCaptureDevice *videoDevice; + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.optional + id optionalVideoConstraints = videoConstraints[@"optional"]; + if (optionalVideoConstraints + && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { + NSArray *options = optionalVideoConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; + if (sourceId) { + videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; + if (videoDevice) { + break; + } + } + } + } + } + if (!videoDevice) { + // constraints.video.facingMode + // + // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode + id facingMode = videoConstraints[@"facingMode"]; + if (facingMode && [facingMode isKindOfClass:[NSString class]]) { + AVCaptureDevicePosition position; + if ([facingMode isEqualToString:@"environment"]) { + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionBack; + } else if ([facingMode isEqualToString:@"user"]) { + self._usingFrontCamera = YES; + position = AVCaptureDevicePositionFront; + } else { + // If the specified facingMode value is not supported, fall back to + // the default video device. 
+ self._usingFrontCamera = NO; + position = AVCaptureDevicePositionUnspecified; + } + videoDevice = [self findDeviceForPosition:position]; + } + } + if (!videoDevice) { + videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + } + + //TODO(rostopira): refactor to separate function and add support for max + + self._targetWidth = 1280; + self._targetHeight = 720; + self._targetFps = 30; + + id mandatory = videoConstraints[@"mandatory"]; + // constraints.video.mandatory + if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) + { + id widthConstraint = mandatory[@"minWidth"]; + if ([widthConstraint isKindOfClass:[NSString class]]) { + int possibleWidth = [widthConstraint intValue]; + if (possibleWidth != 0) { + self._targetWidth = possibleWidth; + } + } + id heightConstraint = mandatory[@"minHeight"]; + if ([heightConstraint isKindOfClass:[NSString class]]) { + int possibleHeight = [heightConstraint intValue]; + if (possibleHeight != 0) { + self._targetHeight = possibleHeight; + } + } + id fpsConstraint = mandatory[@"minFrameRate"]; + if ([fpsConstraint isKindOfClass:[NSString class]]) { + int possibleFps = [fpsConstraint intValue]; + if (possibleFps != 0) { + self._targetFps = possibleFps; + } + } + } + + if (videoDevice) { + RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; + if (self.videoCapturer) { + [self.videoCapturer stopCapture]; + } + self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; + AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; + NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; + [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { + if (error) { + NSLog(@"Start capture error: %@", [error localizedDescription]); + } + }]; + + NSString *trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + successCallback(mediaStream); + } else { + // According to step 6.2.3 of the getUserMedia() algorithm, if there is no + // source, fail with a new OverconstrainedError. + errorCallback(@"OverconstrainedError", /* errorMessage */ nil); + } +} + +-(void)mediaStreamRelease:(RTCMediaStream *)stream +{ + if (stream) { + for (RTCVideoTrack *track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack *track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:stream.streamId]; + } +} + + +/** + * Obtains local media content of a specific type. Requests access for the + * specified {@code mediaType} if necessary. In other words, implements a media + * type-specific iteration of the {@code getUserMedia()} algorithm. + * + * @param mediaType Either {@link AVMediaTypeAudio} or {@link AVMediaTypeVideo} + * which specifies the type of the local media content to obtain. + * @param constraints The {@code MediaStreamConstraints} which are to be + * satisfied by the obtained local media content. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is to collect the + * obtained local media content of the specified {@code mediaType}. 
+ */ +- (void)requestAccessForMediaType:(NSString *)mediaType + constraints:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + // According to step 6.2.1 of the getUserMedia() algorithm, if there is no + // source, fail "with a new DOMException object whose name attribute has the + // value NotFoundError." + // XXX The following approach does not work for audio in Simulator. That is + // because audio capture is done using AVAudioSession which does not use + // AVCaptureDevice there. Anyway, Simulator will not (visually) request access + // for audio. + if (mediaType == AVMediaTypeVideo + && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { + // Since successCallback and errorCallback are asynchronously invoked + // elsewhere, make sure that the invocation here is consistent. + dispatch_async(dispatch_get_main_queue(), ^ { + errorCallback(@"DOMException", @"NotFoundError"); + }); + return; + } + + [AVCaptureDevice + requestAccessForMediaType:mediaType + completionHandler:^ (BOOL granted) { + dispatch_async(dispatch_get_main_queue(), ^ { + if (granted) { + NavigatorUserMediaSuccessCallback scb + = ^ (RTCMediaStream *mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } else { + // According to step 10 Permission Failure of the getUserMedia() + // algorithm, if the user has denied permission, fail "with a new + // DOMException object whose name attribute has the value + // NotAllowedError." 
+ errorCallback(@"DOMException", @"NotAllowedError"); + } + }); + }]; +} + +#if TARGET_OS_IPHONE +-(void)getDisplayMedia:(NSDictionary *)constraints + result:(FlutterResult)result { + NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; + FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; + + [screenCapturer startCapture]; + + //TODO: + self.videoCapturer = screenCapturer; + + NSString *trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCVideoTrack *track in mediaStream.videoTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); +} +#endif +-(void)createLocalMediaStream:(FlutterResult)result{ + NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": [mediaStream streamId] }); +} + +-(void)getSources:(FlutterResult)result{ + NSMutableArray *sources = [NSMutableArray array]; + NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in videoDevices) { + [sources addObject:@{ + @"facing": device.positionString, + 
@"deviceId": device.uniqueID, + @"label": device.localizedName, + @"kind": @"videoinput", + }]; + } + NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; + for (AVCaptureDevice *device in audioDevices) { + [sources addObject:@{ + @"facing": @"", + @"deviceId": device.uniqueID, + @"label": device.localizedName, + @"kind": @"audioinput", + }]; + } + result(@{@"sources": sources}); +} + +-(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track +{ + // what's different to mediaStreamTrackStop? only call mediaStream explicitly? + if (mediaStream && track) { + track.isEnabled = NO; + // FIXME this is called when track is removed from the MediaStream, + // but it doesn't mean it can not be added back using MediaStream.addTrack + //TODO: [self.localTracks removeObjectForKey:trackID]; + if ([track.kind isEqualToString:@"audio"]) { + [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; + } else if([track.kind isEqualToString:@"video"]) { + [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; + } + } +} + +-(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled +{ + if (track && track.isEnabled != enabled) { + track.isEnabled = enabled; + } +} + +-(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track result:(FlutterResult) result +{ + if (!self.videoCapturer) { + result(@NO); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + result(@NO); + return; + } + + AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice *device = deviceInput.device; + + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +} + +-(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track torch:(BOOL)torch result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. 
Can't set torch"); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set torch"); + return; + } + + AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice *device = deviceInput.device; + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + return; + } + + NSError *error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; + AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; + [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ + if (error != nil) { + result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); + } else { + result([NSNumber numberWithBool:self._usingFrontCamera]); + } + }]; +} + +-(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. 
Can't capture frame."); + return; + } + + FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; +} + +-(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track +{ + if (track) { + track.isEnabled = NO; + [self.localTracks removeObjectForKey:track.trackId]; + } +} + +- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice *device in captureDevices) { + if (device.position == position) { + return device; + } + } + return captureDevices[0]; +} + +- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { + NSArray *formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat *selectedFormat = nil; + int currentDiff = INT_MAX; + for (AVCaptureDeviceFormat *format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, self._targetFps); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.h 
b/common/darwin/Classes/FlutterRTCPeerConnection.h new file mode 100755 index 0000000000..b99f885b0a --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1,43 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCPeerConnection (Flutter) +@property (nonatomic, strong) NSMutableDictionary *dataChannels; +@property (nonatomic, strong) NSMutableDictionary *remoteStreams; +@property (nonatomic, strong) NSMutableDictionary *remoteTracks; +@property (nonatomic, strong) NSString *flutterId; +@property (nonatomic, strong) FlutterEventSink eventSink; +@property (nonatomic, strong) FlutterEventChannel* eventChannel; +@end + +@interface FlutterWebRTCPlugin (RTCPeerConnection) + +-(void) peerConnectionCreateOffer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionGetStats:(nonnull NSString *)trackID + peerConnection:(nonnull RTCPeerConnection *)peerConnection + result:(nonnull FlutterResult)result; + +-(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; + +-(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection; + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.m b/common/darwin/Classes/FlutterRTCPeerConnection.m new file mode 100755 index 
0000000000..a265d8e8a5 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1,558 @@ +#import +#import "FlutterWebRTCPlugin.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCDataChannel.h" + +#import + +@implementation RTCPeerConnection (Flutter) + +@dynamic eventSink; + +- (NSString *)flutterId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterId:(NSString *)flutterId +{ + objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink +{ + objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel *)eventChannel +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel *)eventChannel +{ + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)dataChannels +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setDataChannels:(NSMutableDictionary *)dataChannels +{ + objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)remoteStreams +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams +{ + objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)remoteTracks +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks +{ + objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { 
+ self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCPeerConnection) + +-(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection +{ + [peerConnection setConfiguration:configuration]; +} + +-(void) peerConnectionCreateOffer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result +{ + [peerConnection + offerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription *sdp, NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"CreateOfferFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } + }]; +} + +-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection + answerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription *sdp, NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"CreateAnswerFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } + }]; +} + +-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" + 
message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection addIceCandidate:candidate]; + result(nil); + //NSLog(@"addICECandidateresult: %@", candidate); +} + +-(void) peerConnectionClose:(RTCPeerConnection *)peerConnection +{ + [peerConnection close]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary *dataChannels + = peerConnection.dataChannels; + for (NSString *dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. 
+ } + [dataChannels removeAllObjects]; +} + +-(void) peerConnectionGetStats:(nonnull NSString *)trackID + peerConnection:(nonnull RTCPeerConnection *)peerConnection + result:(nonnull FlutterResult)result +{ + RTCMediaStreamTrack *track = nil; + if (!trackID + || !trackID.length + || (track = self.localTracks[trackID]) + || (track = peerConnection.remoteTracks[trackID])) { + [peerConnection statsForTrack:track + statsOutputLevel:RTCStatsOutputLevelStandard + completionHandler:^(NSArray *reports) { + + NSMutableArray *stats = [NSMutableArray array]; + + for (RTCLegacyStatsReport *report in reports) { + [stats addObject:@{@"id": report.reportId, + @"type": report.type, + @"timestamp": @(report.timestamp), + @"values": report.values + }]; + } + + result(@{@"stats": stats}); + }]; + }else{ + result([FlutterError errorWithCode:@"GetStatsFailed" + message:[NSString stringWithFormat:@"Error %@", @""] + details:nil]); + } +} + +- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { + switch (state) { + case RTCIceConnectionStateNew: return @"new"; + case RTCIceConnectionStateChecking: return @"checking"; + case RTCIceConnectionStateConnected: return @"connected"; + case RTCIceConnectionStateCompleted: return @"completed"; + case RTCIceConnectionStateFailed: return @"failed"; + case RTCIceConnectionStateDisconnected: return @"disconnected"; + case RTCIceConnectionStateClosed: return @"closed"; + case RTCIceConnectionStateCount: return @"count"; + } + return nil; +} + +- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { + switch (state) { + case RTCIceGatheringStateNew: return @"new"; + case RTCIceGatheringStateGathering: return @"gathering"; + case RTCIceGatheringStateComplete: return @"complete"; + } + return nil; +} + +- (NSString *)stringForSignalingState:(RTCSignalingState)state { + switch (state) { + case RTCSignalingStateStable: return @"stable"; + case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; + case 
RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; + case RTCSignalingStateClosed: return @"closed"; + } + return nil; +} + + +/** + * Parses the constraint keys and values of a specific JavaScript object into + * a specific NSMutableDictionary in a format suitable for the + * initialization of a RTCMediaConstraints instance. + * + * @param src The JavaScript object which defines constraint keys and values and + * which is to be parsed into the specified dst. + * @param dst The NSMutableDictionary into which the constraint keys + * and values defined by src are to be written in a format suitable for + * the initialization of a RTCMediaConstraints instance. + */ +- (void)parseJavaScriptConstraints:(NSDictionary *)src + intoWebRTCConstraints:(NSMutableDictionary *)dst { + for (id srcKey in src) { + id srcValue = src[srcKey]; + NSString *dstValue; + + if ([srcValue isKindOfClass:[NSNumber class]]) { + dstValue = [srcValue boolValue] ? @"true" : @"false"; + } else { + dstValue = [srcValue description]; + } + dst[[srcKey description]] = dstValue; + } +} + +/** + * Parses a JavaScript object into a new RTCMediaConstraints instance. + * + * @param constraints The JavaScript object to parse into a new + * RTCMediaConstraints instance. + * @returns A new RTCMediaConstraints instance initialized with the + * mandatory and optional constraint keys and values specified by + * constraints. 
+ */ +- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { + id mandatory = constraints[@"mandatory"]; + NSMutableDictionary *mandatory_ + = [NSMutableDictionary new]; + + if ([mandatory isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary *)mandatory + intoWebRTCConstraints:mandatory_]; + } + + id optional = constraints[@"optional"]; + NSMutableDictionary *optional_ + = [NSMutableDictionary new]; + + if ([optional isKindOfClass:[NSArray class]]) { + for (id o in (NSArray *)optional) { + if ([o isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary *)o + intoWebRTCConstraints:optional_]; + } + } + } + + return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ + optionalConstraints:optional_]; +} + +#pragma mark - RTCPeerConnectionDelegate methods + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"signalingState", + @"state" : [self stringForSignalingState:newState]}); + } +} + +-(void)peerConnection:(RTCPeerConnection *)peerConnection + mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ + + peerConnection.remoteTracks[track.trackId] = track; + NSString *streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onAddTrack", + @"streamId": streamId, + @"trackId": track.trackId, + @"track": @{ + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"enabled": @(track.isEnabled), + @"remote": @(YES), + @"readyState": @"live"} + }); + } +} + +-(void)peerConnection:(RTCPeerConnection *)peerConnection + mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ + [peerConnection.remoteTracks 
removeObjectForKey:track.trackId]; + NSString *streamId = stream.streamId; + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onRemoveTrack", + @"streamId": streamId, + @"trackId": track.trackId, + @"track": @{ + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"enabled": @(track.isEnabled), + @"remote": @(YES), + @"readyState": @"live"} + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack *track in stream.audioTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + for (RTCVideoTrack *track in stream.videoTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + NSString *streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onAddStream", + @"streamId": streamId, + @"audioTracks": audioTracks, + @"videoTracks": videoTracks, + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { + NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; + // We assume there can be only one object for 1 key + if (keysArray.count > 1) { + NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); + } + NSString *streamId = stream.streamId; + + for (RTCVideoTrack *track in 
stream.videoTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack *track in stream.audioTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + [peerConnection.remoteStreams removeObjectForKey:streamId]; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onRemoveStream", + @"streamId": streamId, + }); + } +} + +- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{@"event" : @"onRenegotiationNeeded",}); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"iceConnectionState", + @"state" : [self stringForICEConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"iceGatheringState", + @"state" : [self stringForICEGatheringState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onCandidate", + @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { + if (-1 == dataChannel.channelId) { + return; + } + + NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; + dataChannel.peerConnectionId = peerConnection.flutterId; + dataChannel.delegate = self; + 
peerConnection.dataChannels[dataChannelId] = dataChannel; + + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] + binaryMessenger:self.messenger]; + + dataChannel.eventChannel = eventChannel; + dataChannel.flutterChannelId = dataChannelId; + [eventChannel setStreamHandler:dataChannel]; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"didOpenDataChannel", + @"id": dataChannelId, + @"label": dataChannel.label + }); + } +} + +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didChangeConnectionState:(RTCPeerConnectionState)newState { + +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver { + +} + +/** Called when a receiver and its track are created. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didAddReceiver:(RTCRtpReceiver *)rtpReceiver + streams:(NSArray *)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for(RTCMediaStream *stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event": @"onTrack", + @"track": [self mediaTrackToMap:rtpReceiver.track], + @"receiver": [self receiverToMap:rtpReceiver], + @"streams": streams, + }); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver { + +} + +/** Called when the selected ICE candidate pair is changed. 
*/ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didChangeLocalCandidate:(RTCIceCandidate *)local + remoteCandidate:(RTCIceCandidate *)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString *)reason { + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate": local.sdp, + @"sdpMLineIndex": @(local.sdpMLineIndex), + @"sdpMid": local.sdpMid + }, + @"remote" : @{ + @"candidate": remote.sdp, + @"sdpMLineIndex": @(remote.sdpMLineIndex), + @"sdpMid": remote.sdpMid + }, + @"reason": reason, + @"lastDataReceivedMs": @(lastDataReceivedMs) + }); + } +} + +@end + diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.h b/common/darwin/Classes/FlutterRTCVideoRenderer.h new file mode 100755 index 0000000000..96dcd2203d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1,33 @@ +#import "FlutterWebRTCPlugin.h" + +#import +#import +#import +#import + +@interface FlutterRTCVideoRenderer : NSObject + +/** + * The {@link RTCVideoTrack}, if any, which this instance renders. 
+ */ +@property (nonatomic, strong) RTCVideoTrack *videoTrack; +@property (nonatomic) int64_t textureId; +@property (nonatomic, weak) id registry; +@property (nonatomic, strong) FlutterEventSink eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)dispose; + +@end + + +@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +-(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.m b/common/darwin/Classes/FlutterRTCVideoRenderer.m new file mode 100755 index 0000000000..234849ee2c --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1,268 @@ +#import "FlutterRTCVideoRenderer.h" + +#import +#import +#import +#import + +#import +#include "libyuv.h" + +#import "FlutterWebRTCPlugin.h" + +@implementation FlutterRTCVideoRenderer { + CGSize _frameSize; + CGSize _renderSize; + CVPixelBufferRef _pixelBufferRef; + RTCVideoRotation _rotation; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; +} + +@synthesize textureId = _textureId; +@synthesize registry = _registry; +@synthesize eventSink = _eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger{ + self = [super init]; + if (self){ + _isFirstFrameRendered = false; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + _registry = registry; + _pixelBufferRef = nil; + _eventSink = nil; + _rotation = -1; + _textureId = [registry registerTexture:self]; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + return self; +} + +-(void)dealloc { + if(_pixelBufferRef){ + 
CVBufferRelease(_pixelBufferRef); + } +} + +- (CVPixelBufferRef)copyPixelBuffer { + if(_pixelBufferRef != nil){ + CVBufferRetain(_pixelBufferRef); + return _pixelBufferRef; + } + return nil; +} + +-(void)dispose{ + [_registry unregisterTexture:_textureId]; +} + +- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { + RTCVideoTrack *oldValue = self.videoTrack; + + if (oldValue != videoTrack) { + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:self]; + } + _videoTrack = videoTrack; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + if (videoTrack) { + [videoTrack addRenderer:self]; + } + } +} + + +-(id) correctRotation:(const id) src + withRotation:(RTCVideoRotation) rotation +{ + + int rotated_width = src.width; + int rotated_height = src.height; + + if (rotation == RTCVideoRotation_90 || + rotation == RTCVideoRotation_270) { + int temp = rotated_width; + rotated_width = rotated_height; + rotated_height = temp; + } + + id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; + + I420Rotate(src.dataY, src.strideY, + src.dataU, src.strideU, + src.dataV, src.strideV, + (uint8_t*)buffer.dataY, buffer.strideY, + (uint8_t*)buffer.dataU,buffer.strideU, + (uint8_t*)buffer.dataV, buffer.strideV, + src.width, src.height, + (RotationModeEnum)rotation); + + return buffer; +} + +-(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame +{ + id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = 
CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + I420ToNV12(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dstY, + (int)dstYStride, + dstUV, + (int)dstUVStride, + i420Buffer.width, + i420Buffer.height); + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + I420ToARGB(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dst, + (int)bytesPerRow, + i420Buffer.width, + i420Buffer.height); + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + I420ToBGRA(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dst, + (int)bytesPerRow, + i420Buffer.width, + i420Buffer.height); + } + } + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame *)frame { + + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + + __weak FlutterRTCVideoRenderer *weakSelf = self; + if(_renderSize.width != frame.width || _renderSize.height != frame.height){ + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + if(strongSelf.eventSink){ + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeVideoSize", + @"id": @(strongSelf.textureId), + @"width": @(frame.width), + @"height": @(frame.height), + }); + } + }); + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if(frame.rotation != _rotation){ + 
dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + if(strongSelf.eventSink){ + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeRotation", + @"id": @(strongSelf.textureId), + @"rotation": @(frame.rotation), + }); + } + }); + + _rotation = frame.rotation; + } + + //Notify the Flutter new pixelBufferRef to be ready. + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; + if (!strongSelf->_isFirstFrameRendered) { + if (strongSelf.eventSink) { + strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); + strongSelf->_isFirstFrameRendered = true; + } + } + }); +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. + */ +- (void)setSize:(CGSize)size { + if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) + { + if(_pixelBufferRef){ + CVBufferRelease(_pixelBufferRef); + } + NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, + size.width, size.height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); + + _frameSize = size; + } +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger{ + return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; +} + +-(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer 
stream:(RTCVideoTrack*)videoTrack { + renderer.videoTrack = videoTrack; +} +@end + diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h new file mode 100644 index 0000000000..93c81725a5 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1,34 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif + +#import +#import + +@class FlutterRTCVideoRenderer; + +@interface FlutterWebRTCPlugin : NSObject + +@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; +@property (nonatomic, strong) NSMutableDictionary *peerConnections; +@property (nonatomic, strong) NSMutableDictionary *localStreams; +@property (nonatomic, strong) NSMutableDictionary *localTracks; +@property (nonatomic, strong) NSMutableDictionary *renders; +#if TARGET_OS_IPHONE +@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ +#endif +@property (nonatomic, strong) NSObject* messenger; +@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; +@property (nonatomic) BOOL _usingFrontCamera; +@property (nonatomic) int _targetWidth; +@property (nonatomic) int _targetHeight; +@property (nonatomic) int _targetFps; + +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag; +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track; +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver; + +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m new file mode 100644 index 0000000000..2c6afad1f0 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1,1415 @@ +#import "FlutterWebRTCPlugin.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCVideoRenderer.h" + +#import 
+#import + +@implementation FlutterWebRTCPlugin { + FlutterMethodChannel *_methodChannel; + id _registry; + id _messenger; + id _textures; + BOOL _speakerOn; +} + +@synthesize messenger = _messenger; + ++ (void)registerWithRegistrar:(NSObject*)registrar { + + FlutterMethodChannel* channel = [FlutterMethodChannel + methodChannelWithName:@"FlutterWebRTC.Method" + binaryMessenger:[registrar messenger]]; +#if TARGET_OS_IPHONE + UIViewController *viewController = (UIViewController *)registrar.messenger; +#endif + FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel + registrar:registrar + messenger:[registrar messenger] +#if TARGET_OS_IPHONE + viewController:viewController +#endif + withTextures:[registrar textures]]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithChannel:(FlutterMethodChannel *)channel + registrar:(NSObject*)registrar + messenger:(NSObject*)messenger +#if TARGET_OS_IPHONE + viewController:(UIViewController *)viewController +#endif + withTextures:(NSObject *)textures{ + + self = [super init]; + + if (self) { + _methodChannel = channel; + _registry = registrar; + _textures = textures; + _messenger = messenger; + _speakerOn = NO; +#if TARGET_OS_IPHONE + self.viewController = viewController; +#endif + } + //RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); + RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; + RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; + + _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] + initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory]; + + + self.peerConnections = [NSMutableDictionary new]; + self.localStreams = [NSMutableDictionary new]; + self.localTracks = [NSMutableDictionary new]; + self.renders = [[NSMutableDictionary alloc] init]; +#if TARGET_OS_IPHONE + [[NSNotificationCenter defaultCenter] addObserver:self 
selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; +#endif + return self; +} + + +- (void)didSessionRouteChange:(NSNotification *)notification { + NSDictionary *interuptionDict = notification.userInfo; +#if TARGET_OS_IPHONE + NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; + + switch (routeChangeReason) { + case AVAudioSessionRouteChangeReasonCategoryChange: { + NSError* error; + [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; + } + break; + + default: + break; + } +#endif +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { + + if ([@"createPeerConnection" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + NSDictionary* constraints = argsMap[@"constraints"]; + + RTCPeerConnection *peerConnection = [self.peerConnectionFactory + peerConnectionWithConfiguration:[self RTCConfiguration:configuration] + constraints:[self parseMediaConstraints:constraints] + delegate:self]; + + peerConnection.remoteStreams = [NSMutableDictionary new]; + peerConnection.remoteTracks = [NSMutableDictionary new]; + peerConnection.dataChannels = [NSMutableDictionary new]; + + NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; + peerConnection.flutterId = peerConnectionId; + + /*Create Event Channel.*/ + peerConnection.eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] + binaryMessenger:_messenger]; + [peerConnection.eventChannel setStreamHandler:peerConnection]; + + self.peerConnections[peerConnectionId] = peerConnection; + result(@{ @"peerConnectionId" : peerConnectionId}); + } else if ([@"getUserMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getUserMedia:constraints result:result]; + } else if ([@"getDisplayMedia" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getDisplayMedia:constraints result:result]; +#else + result(FlutterMethodNotImplemented); +#endif + } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { + [self createLocalMediaStream:result]; + } else if ([@"getSources" isEqualToString:call.method]) { + [self getSources:result]; + } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + [self mediaStreamGetTracks:streamId result:result]; + } else if ([@"createOffer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + { + [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createAnswer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary * constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + { + [self peerConnectionCreateAnswer:constraints + peerConnection:peerConnection + result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: 
peerConnection not found!"] + details:nil]); + } + } else if ([@"addStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream *stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection && stream){ + [peerConnection addStream:stream]; + result(@""); + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"removeStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream *stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection && stream){ + [peerConnection removeStream:stream]; + result(nil); + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"captureFrame" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStreamTrack *track = [self trackForId: trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " 
stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"setLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary *descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; + if(peerConnection) + { + [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary *descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; + + if(peerConnection) + { + [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"sendDtmf" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* tone = argsMap[@"tone"]; 
+ int duration = ((NSNumber*)argsMap[@"duration"]).intValue; + int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; + + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + + RTCRtpSender* audioSender = nil ; + for( RTCRtpSender *rtpSender in peerConnection.senders){ + if([[[rtpSender track] kind] isEqualToString:@"audio"]) { + audioSender = rtpSender; + } + } + if(audioSender){ + NSOperationQueue *queue = [[NSOperationQueue alloc] init]; + [queue addOperationWithBlock:^{ + double durationMs = duration / 1000.0; + double interToneGapMs = interToneGap / 1000.0; + [audioSender.dtmfSender insertDtmf :(NSString *)tone + duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; + NSLog(@"DTMF Tone played "); + }]; + } + + result(@{@"result": @"success"}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addCandidate" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* candMap = argsMap[@"candidate"]; + NSString *sdp = candMap[@"candidate"]; + int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; + NSString *sdpMid = candMap[@"sdpMid"]; + + RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection) + { + [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getStats" isEqualToString:call.method]) { + NSDictionary* argsMap = 
call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; + result(nil); + } else if ([@"createDataChannel" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* label = argsMap[@"label"]; + NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; + [self createDataChannel:peerConnectionId + label:label + config:[self RTCDataChannelConfiguration:dataChannelDict] + messenger:_messenger]; + result(nil); + } else if ([@"dataChannelSend" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + NSString* type = argsMap[@"type"]; + id data = argsMap[@"data"]; + + [self dataChannelSend:peerConnectionId + dataChannelId:dataChannelId + data:data + type:type]; + result(nil); + } else if ([@"dataChannelClose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + [self dataChannelClose:peerConnectionId + dataChannelId:dataChannelId]; + result(nil); + } else if ([@"streamDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + for (RTCVideoTrack *track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + RTCVideoSource *source = videoTrack.source; + if(source){ + [self.videoCapturer stopCapture]; + self.videoCapturer = nil; + } + } 
+ for (RTCAudioTrack *track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:streamId]; + } + result(nil); + } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* enabled = argsMap[@"enabled"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil){ + track.isEnabled = enabled.boolValue; + } + result(nil); + } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil) { + if([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + [stream addAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]){ + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [stream addVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" message:nil details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" message:nil details:nil]); + } + result(nil); + } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil) { + if([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + [stream removeAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]){ + RTCVideoTrack 
*videoTrack = (RTCVideoTrack *)track; + [stream removeVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]); + } + result(nil); + } else if ([@"trackDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + [self.localTracks removeObjectForKey:trackId]; + result(nil); + } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [peerConnection close]; + [self.peerConnections removeObjectForKey:peerConnectionId]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary *dataChannels = peerConnection.dataChannels; + for (NSNumber *dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. 
+ } + [dataChannels removeAllObjects]; + } + result(nil); + } else if ([@"createVideoRenderer" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures + messenger:_messenger]; + self.renders[@(render.textureId)] = render; + result(@{@"textureId": @(render.textureId)}); + } else if ([@"videoRendererDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSNumber *textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer *render = self.renders[textureId]; + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + result(nil); + } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSNumber *textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer *render = self.renders[textureId]; + NSString *streamId = argsMap[@"streamId"]; + NSString *ownerTag = argsMap[@"ownerTag"]; + if(!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" message:nil details:nil]); + return; + } + RTCMediaStream *stream = nil; + RTCVideoTrack* videoTrack = nil; + if([ownerTag isEqualToString:@"local"]){ + stream = _localStreams[streamId]; + } + if(!stream){ + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if(stream){ + NSArray *videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + [self rendererSetSrcObject:render stream:videoTrack]; + result(nil); + } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackHasTorch:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL torch = [argsMap[@"torch"] boolValue]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackSwitchCamera:videoTrack result:result]; 
+ } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"setVolume" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* volume = argsMap[@"volume"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + RTCAudioSource *audioSource = audioTrack.source; + audioSource.volume = [volume doubleValue]; + } + result(nil); + } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* mute = argsMap[@"mute"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + audioTrack.isEnabled = !mute.boolValue; + } + result(nil); + } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSDictionary* argsMap = call.arguments; + NSNumber* enable = argsMap[@"enable"]; + _speakerOn = enable.boolValue; + AVAudioSession *audioSession = [AVAudioSession sharedInstance]; + [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:_speakerOn ? 
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 + error:nil]; + [audioSession setActive:YES error:nil]; + result(nil); +#else + result(FlutterMethodNotImplemented); +#endif + } else if ([@"getLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + RTCSessionDescription* sdp = peerConnection.localDescription; + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + RTCSessionDescription* sdp = peerConnection.remoteDescription; + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setConfiguration" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError errorWithCode:[NSString 
stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createSender" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* kind = argsMap[@"kind"]; + NSString* streamId = argsMap[@"streamId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection senderWithKind:kind streamId:streamId]; + result([self rtpSenderToMap:sender]); + } else if ([@"closeSender" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + + if(![peerConnection removeTrack:sender]) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't close sender!"] + details:nil]); + return; + } + + result(nil); + } else if ([@"addTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = 
argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [peerConnection removeTrack:sender]; + result(nil); + } else if ([@"addTransceiver" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* mediaType = argsMap[@"mediaType"]; + RTCPeerConnection 
*peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transceiver = nil; + + if(trackId != nil) { + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverWithTrack:track init:init]; + } else { + transceiver = [peerConnection addTransceiverWithTrack:track]; + } + } else if (mediaType != nil) { + RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; + } else { + transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; + } + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: Incomplete parameters!"] + details:nil]); + return; + } + + if (transceiver == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] + details:nil]); + return; + } + + result([self transceiverToMap:transceiver]); + } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* direction = argsMap[@"direction"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + 
message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } +#if TARGET_OS_IPHONE + [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; +#elif TARGET_OS_MAC + [transcevier setDirection:[self stringToTransceiverDirection:direction]]; +#endif + result(nil); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + result(@{@"result": [self transceiverDirectionString:transcevier.direction]}); + } else if ([@"rtpTransceiverStop" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString 
stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } +#if TARGET_OS_IPHONE + [transcevier stopInternal]; +#elif TARGET_OS_MAC + [transcevier stop]; +#endif + result(nil); + } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSDictionary* parameters = argsMap[@"parameters"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setParameters:[self mapToRtpParameters:parameters]]; + + result(nil); + } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString 
stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + 
NSString* senderId = argsMap[@"senderId"];
        RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
        if(peerConnection == nil) {
            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method]
                                       message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
                                       details:nil]);
            return;
        }
        RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId];
        if(sender == nil) {
            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method]
                                       message:[NSString stringWithFormat:@"Error: sender not found!"]
                                       details:nil]);
            return;
        }
        [peerConnection removeTrack:sender];
        result(nil);
    } else {
        result(FlutterMethodNotImplemented);
    }
}

/// Tears down all plugin-owned WebRTC state.
/// Clears the local track/stream registries, detaches each peer connection's
/// delegate before closing it (so no callbacks fire into a dying object),
/// then drops the factory.
- (void)dealloc
{
    [_localTracks removeAllObjects];
    _localTracks = nil;
    [_localStreams removeAllObjects];
    _localStreams = nil;

    for (NSString *peerConnectionId in _peerConnections) {
        RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId];
        // Detach the delegate first so close() does not re-enter us mid-dealloc.
        peerConnection.delegate = nil;
        [peerConnection close];
    }
    [_peerConnections removeAllObjects];
    _peerConnectionFactory = nil;
}

/// Serializes one media track into the map shape the Dart side expects.
/// Shared by the audio and video loops in mediaStreamGetTracks (previously
/// this dictionary literal was duplicated verbatim for both kinds).
/// NOTE(review): "label" mirrors the track id and "readyState"/"remote" are
/// hard-coded, matching the original behavior — confirm against the Dart API.
- (NSDictionary *)mediaTrackToMap:(RTCMediaStreamTrack *)track
{
    NSString *trackId = track.trackId;
    return @{
        @"enabled": @(track.isEnabled),
        @"id": trackId,
        @"kind": track.kind,
        @"label": trackId,
        @"readyState": @"live",
        @"remote": @(NO)
    };
}

/// Answers the "mediaStreamGetTracks" method call: looks the stream up in
/// both local and remote registries, registers each of its tracks in
/// self.localTracks (so later track-level calls can find them), and returns
/// {audioTracks: [...], videoTracks: [...]} — or nil if the stream is unknown.
-(void)mediaStreamGetTracks:(NSString*)streamId
                     result:(FlutterResult)result
{
    RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""];
    if (!stream) {
        result(nil);
        return;
    }

    NSMutableArray *audioTracks = [NSMutableArray array];
    NSMutableArray *videoTracks = [NSMutableArray array];

    for (RTCMediaStreamTrack *track in stream.audioTracks) {
        self.localTracks[track.trackId] = track;
        [audioTracks addObject:[self mediaTrackToMap:track]];
    }

    for (RTCMediaStreamTrack *track in stream.videoTracks) {
        self.localTracks[track.trackId] = track;
        [videoTracks addObject:[self mediaTrackToMap:track]];
    }

    result(@{@"audioTracks": audioTracks, @"videoTracks": videoTracks});
}

/// Resolves a stream id to an RTCMediaStream.
/// Search order: the named peer connection's remote streams (when
/// peerConnectionId is non-empty), otherwise every peer connection's remote
/// streams, and finally the local stream registry. Returns nil if not found.
- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId
{
    RTCMediaStream *stream = nil;
    if (peerConnectionId.length > 0) {
        RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId];
        stream = peerConnection.remoteStreams[streamId];
    } else {
        for (RTCPeerConnection *peerConnection in _peerConnections.allValues) {
            stream = peerConnection.remoteStreams[streamId];
            if (stream) {
                break;
            }
        }
    }
    if (!stream) {
        stream = _localStreams[streamId];
    }
    return stream;
}

/// Resolves a track id to an RTCMediaStreamTrack, checking local tracks
/// first and then every peer connection's remote tracks. Returns nil if the
/// id is unknown.
- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId
{
    RTCMediaStreamTrack *track = _localTracks[trackId];
    if (!track) {
        for (RTCPeerConnection *peerConnection in _peerConnections.allValues) {
            track = peerConnection.remoteTracks[trackId];
            if (track) {
                break;
            }
        }
    }
    return track;
}

/// Builds an RTCIceServer from a JSON-style dictionary.
/// Accepts either a single "url" string (non-standard, kept for backward
/// compatibility), a single "urls" string, or an array of "urls" strings.
/// Returns nil (and logs) for anything malformed instead of crashing.
- (RTCIceServer *)RTCIceServer:(id)json
{
    if (!json) {
        NSLog(@"a valid iceServer value");
        return nil;
    }

    if (![json isKindOfClass:[NSDictionary class]]) {
        NSLog(@"must be an object");
        return nil;
    }

    NSArray *urls;
    if ([json[@"url"] isKindOfClass:[NSString class]]) {
        // TODO: 'url' is non-standard
        urls = @[json[@"url"]];
    } else if ([json[@"urls"] isKindOfClass:[NSString class]]) {
        urls = @[json[@"urls"]];
    } else if ([json[@"urls"] isKindOfClass:[NSArray class]]) {
        urls = (NSArray*)json[@"urls"];
    } else {
        // Previously this blind-cast json[@"urls"] to NSArray; a number or
        // dictionary here would have crashed initWithURLStrings:.
        NSLog(@"iceServer 'urls' must be a string or an array of strings");
        return nil;
    }

    if (json[@"username"] != nil || json[@"credential"] != nil) {
        return [[RTCIceServer alloc] initWithURLStrings:urls
                                               username:json[@"username"]
                                             credential:json[@"credential"]];
    }

    return [[RTCIceServer alloc] initWithURLStrings:urls];
}

- (nonnull RTCConfiguration *)RTCConfiguration:(id)json
{
    RTCConfiguration *config = [[RTCConfiguration alloc] init];

    if (!json) {
        return config;
    }

    if (![json
isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return config; + } + + if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; + } + + if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { + NSString *bundlePolicy = json[@"bundlePolicy"]; + if ([bundlePolicy isEqualToString:@"balanced"]) { + config.bundlePolicy = RTCBundlePolicyBalanced; + } else if ([bundlePolicy isEqualToString:@"max-compat"]) { + config.bundlePolicy = RTCBundlePolicyMaxCompat; + } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { + config.bundlePolicy = RTCBundlePolicyMaxBundle; + } + } + + if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; + } + + if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; + } + + if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { + NSMutableArray *iceServers = [NSMutableArray new]; + for (id server in json[@"iceServers"]) { + RTCIceServer *convert = [self RTCIceServer:server]; + if (convert != nil) { + [iceServers addObject:convert]; + } + } + config.iceServers = iceServers; + } + + if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { + NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; + if ([iceTransportPolicy isEqualToString:@"all"]) { + config.iceTransportPolicy = RTCIceTransportPolicyAll; + } else if ([iceTransportPolicy isEqualToString:@"none"]) { + config.iceTransportPolicy = 
RTCIceTransportPolicyNone; + } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNoHost; + } else if ([iceTransportPolicy isEqualToString:@"relay"]) { + config.iceTransportPolicy = RTCIceTransportPolicyRelay; + } + } + + if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { + NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; + if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; + } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; + } + } + + if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { + NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; + if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; + } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; + } + } + + if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { + NSString *sdpSemantics = json[@"sdpSemantics"]; + if ([sdpSemantics isEqualToString:@"plan-b"]) { + config.sdpSemantics = RTCSdpSemanticsPlanB; + } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { + config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; + } + } + + return config; +} + +- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json +{ + if (!json) { + return nil; + } + if ([json isKindOfClass:[NSDictionary class]]) { + RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; + + if (json[@"id"]) { + [init setChannelId:(int)[json[@"id"] integerValue]]; + } + if (json[@"ordered"]) { + init.isOrdered = [json[@"ordered"] boolValue]; + } + if (json[@"maxRetransmitTime"]) { + init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; + } + if 
(json[@"maxRetransmits"]) { + init.maxRetransmits = [json[@"maxRetransmits"] intValue]; + } + if (json[@"negotiated"]) { + init.isNegotiated = [json[@"negotiated"] boolValue]; + } + if (json[@"protocol"]) { + init.protocol = json[@"protocol"]; + } + return init; + } + return nil; +} + +- (CGRect)parseRect:(NSDictionary *)rect { + return CGRectMake([[rect valueForKey:@"left"] doubleValue], + [[rect valueForKey:@"top"] doubleValue], + [[rect valueForKey:@"width"] doubleValue], + [[rect valueForKey:@"height"] doubleValue]); +} + +- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { + return @{ + @"dtmfSenderId": Id, + @"interToneGap": @(dtmf.interToneGap / 1000.0), + @"duration": @(dtmf.duration / 1000.0), + }; +} + +- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { + NSDictionary *rtcp = @{ + @"cname": parameters.rtcp.cname, + @"reducedSize": @(parameters.rtcp.isReducedSize), + }; + + NSMutableArray *headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri": headerExtension.uri, + @"encrypted": @(headerExtension.encrypted), + @"id": @(headerExtension.id), + }]; + } + + NSMutableArray *encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + [encodings addObject:@{ + @"active": @(encoding.isActive), + @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : [NSNumber numberWithInt:0], + @"maxBitrateBps": encoding.maxBitrateBps? encoding.maxBitrateBps : [NSNumber numberWithInt:0], + @"maxFramerate": encoding.maxFramerate? encoding.maxFramerate : @(30), + @"numTemporalLayers": encoding.numTemporalLayers? encoding.numTemporalLayers : @(1), + @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? @(encoding.scaleResolutionDownBy.doubleValue) : [NSNumber numberWithDouble:1.0], + @"ssrc": encoding.ssrc ? 
encoding.ssrc : [NSNumber numberWithLong:0] + }]; + } + + NSMutableArray *codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name": codec.name, + @"payloadType": @(codec.payloadType), + @"clockRate": codec.clockRate, + @"numChannels": codec.numChannels? codec.numChannels : @(1), + @"parameters": codec.parameters, + @"kind": codec.kind + }]; + } + + return @{ + @"transactionId": parameters.transactionId, + @"rtcp": rtcp, + @"headerExtensions": headerExtensions, + @"encodings": encodings, + @"codecs": codecs + }; +} + +-(NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId": stream.streamId, + @"ownerTag": ownerTag, + @"audioTracks": audioTracks, + @"videoTracks":videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if(track == nil) + return @{}; + NSDictionary *params = @{ + @"enabled": @(track.isEnabled), + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"readyState": [self streamTrackStateToString:track.readyState], + @"remote": @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender *)sender { + NSDictionary *params = @{ + @"senderId": sender.senderId, + @"ownsTrack": @(YES), + @"rtpParameters": [self rtpParametersToMap:sender.parameters], + @"track": [self 
mediaTrackToMap:sender.track], + @"dtmfSender": [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +-(NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary *params = @{ + @"receiverId": receiver.receiverId, + @"rtpParameters": [self rtpParametersToMap:receiver.parameters], + @"track": [self mediaTrackToMap:receiver.track], + }; + return params; +} + +-(RTCRtpTransceiver*) getRtpTransceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if([transceiver.mid isEqualToString:Id]){ + return transceiver; + } + } + return nil; +} + +-(RTCRtpSender*) getRtpSnderById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpSender* sender in peerConnection.senders) { + if([sender.senderId isEqualToString:Id]){ + return sender; + } + } + return nil; +} + +-(RTCRtpReceiver*) getRtpReceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpReceiver* receiver in peerConnection.receivers) { + if([receiver.receiverId isEqualToString:Id]){ + return receiver; + } + } + return nil; +} + +-(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; +#if TARGET_OS_IPHONE + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; +#endif + [encoding setRid:map[@"rid"]]; + + if(map[@"active"] != nil) { + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; + } + + if(map[@"minBitrateBps"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrateBps"]]; + } + + if(map[@"maxBitrateBps"] != nil) { + [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrateBps"]]; + } + + if(map[@"maxFramerate"] != nil) { + [encoding 
setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if(map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if(map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + return encoding; +} + +-(RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; + NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [RTCRtpTransceiverInit alloc]; + init.direction = [self stringToTransceiverDirection:direction]; + init.streamIds = streamIds; + + if(encodingsParams != nil) { + NSArray *sendEncodings = [[NSArray alloc] init]; + for (NSDictionary* map in encodingsParams){ + sendEncodings = [sendEncodings arrayByAddingObject:[self mapToEncoding:map]]; + } + [init setSendEncodings:sendEncodings]; + } + return init; +} + +-(RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +-(RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if([type isEqualToString:@"sendonly"]){ + return RTCRtpTransceiverDirectionSendOnly; + } else if([type isEqualToString: @"recvonly"]){ + return RTCRtpTransceiverDirectionRecvOnly; + } else if([type isEqualToString: @"inactive"]){ + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +-(RTCRtpParameters *)mapToRtpParameters:(NSDictionary *)map { + //TODO: + return nil; +} + +-(NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + 
case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + } + return nil; +} + +-(NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid? transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId": mid, + @"mid": mid, + @"direction": [self transceiverDirectionString:transceiver.direction], + @"sender": [self rtpSenderToMap:transceiver.sender], + @"receiver": [self receiverToMap:transceiver.receiver] + }; + return params; +} + +@end diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index e439e9440b..fe08b29279 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -17,7 +17,7 @@ class _MyAppState extends State { final _localRenderer = RTCVideoRenderer(); final _remoteRenderer = RTCVideoRenderer(); bool _inCalling = false; - Timer _timer; + //Timer _timer; @override void initState() { @@ -71,8 +71,8 @@ class _MyAppState extends State { } void _onAddStream(MediaStream stream) { - print('addStream: ' + stream.id); - _remoteRenderer.srcObject = stream; + print('New stream: ' + stream.id); + //_remoteRenderer.srcObject = stream; } void _onRemoveStream(MediaStream stream) { @@ -84,6 +84,14 @@ class _MyAppState extends State { _peerConnection.addCandidate(candidate); } + void _onTrack(RTCTrackEvent event) { + print('onTrack'); + if (event.track.kind == 'video' && event.streams.isNotEmpty) { + print('New stream: ' + event.streams[0].id); + _remoteRenderer.srcObject = event.streams[0]; + } + } + void _onRenegotiationNeeded() { print('RenegotiationNeeded'); } @@ -107,7 +115,8 @@ class _MyAppState extends State { var configuration = { 'iceServers': [ {'url': 'stun:stun.l.google.com:19302'}, - ] + ], + 'sdpSemantics': 'unified-plan' }; final 
offerSdpConstraints = { @@ -128,8 +137,6 @@ class _MyAppState extends State { if (_peerConnection != null) return; try { - _localStream = await navigator.getUserMedia(mediaConstraints); - _localRenderer.srcObject = _localStream; _peerConnection = await createPeerConnection(configuration, loopbackConstraints); @@ -141,19 +148,101 @@ class _MyAppState extends State { _peerConnection.onIceCandidate = _onCandidate; _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; + _peerConnection.onTrack = _onTrack; + + _localStream = + await navigator.mediaDevices.getUserMedia(mediaConstraints); + _localRenderer.srcObject = _localStream; + + /* old API await _peerConnection.addStream(_localStream); + // or + var rtpSender = + await _peerConnection.createSender('audio', _localStream.id); + await rtpSender.setTrack(_localStream.getAudioTracks()[0]); + rtpSender = await _peerConnection.createSender('video', _localStream.id); + await rtpSender.setTrack(_localStream.getVideoTracks()[0]); + */ + /* + // Unified-Plan + _localStream.getTracks().forEach((track) { + _peerConnection.addTrack(track, [_localStream]); + }); + */ + // or + + await _peerConnection.addTransceiver( + track: _localStream.getAudioTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendRecv, streams: [_localStream]), + ); + + // ignore: unused_local_variable + var transceiver = await _peerConnection.addTransceiver( + track: _localStream.getVideoTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendRecv, streams: [_localStream]), + ); + + /* + // Unified-Plan Simulcast + await _peerConnection.addTransceiver( + track: _localStream.getVideoTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendOnly, + streams: [_localStream], + sendEncodings: [ + // for firefox order matters... first high resolution, then scaled resolutions... 
+ RTCRtpEncoding( + rid: 'f', + maxBitrateBps: 900000, + numTemporalLayers: 3, + ), + RTCRtpEncoding( + rid: 'h', + numTemporalLayers: 3, + maxBitrateBps: 300000, + scaleResolutionDownBy: 2.0, + ), + RTCRtpEncoding( + rid: 'q', + numTemporalLayers: 3, + maxBitrateBps: 100000, + scaleResolutionDownBy: 4.0, + ), + ], + )); + + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + */ var description = await _peerConnection.createOffer(offerSdpConstraints); - print(description.sdp); + var sdp = description.sdp; + print('sdp = $sdp'); await _peerConnection.setLocalDescription(description); //change for loopback. description.type = 'answer'; await _peerConnection.setRemoteDescription(description); + + /* Unfied-Plan replaceTrack + var stream = await MediaDevices.getDisplayMedia(mediaConstraints); + _localRenderer.srcObject = _localStream; + await transceiver.sender.replaceTrack(stream.getVideoTracks()[0]); + // do re-negotiation .... + + */ } catch (e) { print(e.toString()); } if (!mounted) return; - _timer = Timer.periodic(Duration(seconds: 1), handleStatsReport); + //_timer = Timer.periodic(Duration(seconds: 1), handleStatsReport); setState(() { _inCalling = true; @@ -173,7 +262,7 @@ class _MyAppState extends State { setState(() { _inCalling = false; }); - _timer.cancel(); + //_timer.cancel(); } void _sendDtmf() async { diff --git a/example/scripts/project_tools.sh b/example/scripts/project_tools.sh index 355266f7e5..130e561006 100755 --- a/example/scripts/project_tools.sh +++ b/example/scripts/project_tools.sh @@ -37,7 +37,7 @@ function add_permission_label() { echo "" echo "Add permission labels to AndroidManifest.xml." 
echo "" - python add-line.py -i ../android/app/build.gradle -s 'minSdkVersion 16' -t 'minSdkVersion 18' -r + python add-line.py -i ../android/app/build.gradle -s 'minSdkVersion 16' -t 'minSdkVersion 21' -r python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h deleted file mode 100644 index f951a3d4ba..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.h +++ /dev/null @@ -1,9 +0,0 @@ -#import - -@interface FlutterRPScreenRecorder : RTCVideoCapturer - --(void)startCapture; - --(void)stopCapture; - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m deleted file mode 100644 index aa61614915..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.m +++ /dev/null @@ -1,72 +0,0 @@ -#import "FlutterRPScreenRecorder.h" -#import - -//See: https://developer.apple.com/videos/play/wwdc2017/606/ - -@implementation FlutterRPScreenRecorder { - RPScreenRecorder *screenRecorder; - RTCVideoSource *source; -} - -- (instancetype)initWithDelegate:(__weak id)delegate { - source = delegate; - return [super initWithDelegate:delegate]; -} - --(void)startCapture -{ - if(screenRecorder == NULL) - screenRecorder = [RPScreenRecorder sharedRecorder]; - - [screenRecorder setMicrophoneEnabled:NO]; - - if (![screenRecorder isAvailable]) { - NSLog(@"Screen recorder is not available!"); - return; - } - - [screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, 
NSError * _Nullable error) { - if (bufferType == RPSampleBufferTypeVideo) {// We want video only now - [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; - } - } completionHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)stopCapture -{ - [screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType -{ - if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || - !CMSampleBufferDataIsReady(sampleBuffer)) { - return; - } - - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - if (pixelBuffer == nil) { - return; - } - - size_t width = CVPixelBufferGetWidth(pixelBuffer); - size_t height = CVPixelBufferGetHeight(pixelBuffer); - - [source adaptOutputFormatToWidth:width/2 height:height/2 fps:8]; - - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; - int64_t timeStampNs = - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:RTCVideoRotation_0 - timeStampNs:timeStampNs]; - [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; -} - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.h +++ 
/dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index e408c53076..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, 
@selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - 
--(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? - ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. 
-- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? 
data : [NSNull null])}); - } -} - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 03e7655595..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 34ae78f1a8..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,82 +0,0 @@ -#import - -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import CoreVideo; - -@implementation FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - CVPixelBufferRef pixelBufferRef = 
((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; - - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - break; - case RTCVideoRotation_180: - orientation = UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } - - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 12f1633cde..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,29 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - 
result:(FlutterResult)result; - --(void)createLocalMediaStream:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track - torch:(BOOL) torch - result:(FlutterResult) result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 322766b39a..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,613 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRPScreenRecorder.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- 
(RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... 
--(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the 
specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. 
- if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. - if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. 
- * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. 
- self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. 
- */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. - dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." 
- errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)createLocalMediaStream:(FlutterResult)result{ - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": [mediaStream streamId] }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - 
@"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track result:(FlutterResult) result -{ - if (!self.videoCapturer) { - result(@NO); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - result(@NO); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); -} - --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track torch:(BOOL)torch result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't set torch"); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - NSLog(@"Video capturer is missing an input. 
Can't set torch"); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { - NSLog(@"Current capture device does not support torch. Can't set torch"); - return; - } - - NSError *error; - if ([device lockForConfiguration:&error] == NO) { - NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); - return; - } - - device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; - [device unlockForConfiguration]; - - result(nil); -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. 
Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m new file 
mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) 
peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 6517f85ffe..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,504 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams 
-{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if 
(error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. 
- NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; 
- case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. - */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. 
- */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks 
removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in 
stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - 
peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index d1bd7b8675..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,33 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link RTCVideoTrack}, if any, which this instance renders. 
- */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 1f112b24ae..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,282 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" - -#import -#import -#import -#import - -#import -#include "libyuv.h" - -#import "FlutterWebRTCPlugin.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - _textureId = [registry registerTexture:self]; - 
/*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - (uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if 
(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : 
@"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. 
- */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId{ - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId peerConnectionId:peerConnectionId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video track for RTCMediaStream: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 3be67f3518..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,30 +0,0 @@ -#import -#import - -#import -#import -#import -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git 
a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index c515f3b381..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,825 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import -#import - - - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; - BOOL _speakerOn; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - UIViewController *viewController = (UIViewController *)registrar.messenger; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - viewController:viewController - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - viewController:(UIViewController *)viewController - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - _speakerOn = NO; - self.viewController = viewController; - } - - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = 
[NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; - - return self; -} - - -- (void)didSessionRouteChange:(NSNotification *)notification { - NSDictionary *interuptionDict = notification.userInfo; - NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; - - switch (routeChangeReason) { - case AVAudioSessionRouteChangeReasonCategoryChange: { - NSError* error; - [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; - } - break; - - default: - break; - } -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = 
peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { - [self createLocalMediaStream:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString 
stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil 
details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - 
} else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - 
if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream addAudioTrack:audioTrack]; - } else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream addVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream removeAudioTrack:audioTrack]; - 
} else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream removeVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) { - [peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. 
- } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *peerConnectionId = argsMap[@"ownerTag"]; - if(render){ - [self setStreamId:streamId view:render peerConnectionId:peerConnectionId]; - } - result(nil); - } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackHasTorch:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - BOOL torch = [argsMap[@"torch"] 
boolValue]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track 
isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; - _speakerOn = enable.boolValue; - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:_speakerOn ? AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = 
call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": 
audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - - return track; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"audioJitterBufferMaxPackets"] != nil && 
[json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if 
([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if 
(json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec index aa19eea685..162c3e99e1 100644 --- a/ios/flutter_webrtc.podspec +++ b/ios/flutter_webrtc.podspec @@ -16,7 +16,7 @@ A new flutter plugin project. s.public_header_files = 'Classes/**/*.h' s.dependency 'Flutter' s.dependency 'Libyuv', '1703' - s.dependency 'GoogleWebRTC', '1.1.29400' + s.dependency 'GoogleWebRTC', '1.1.31999' s.ios.deployment_target = '10.0' s.static_framework = true end diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index f64507e3f3..fd5d1a3dc6 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -7,8 +7,14 @@ export 'src/interface/rtc_data_channel.dart'; export 'src/interface/rtc_dtmf_sender.dart'; export 'src/interface/rtc_ice_candidate.dart'; export 'src/interface/rtc_peerconnection.dart'; +export 'src/interface/rtc_rtcp_parameters.dart'; +export 'src/interface/rtc_rtp_parameters.dart'; +export 'src/interface/rtc_rtp_receiver.dart'; +export 'src/interface/rtc_rtp_sender.dart'; +export 'src/interface/rtc_rtp_transceiver.dart'; export 'src/interface/rtc_session_description.dart'; export 'src/interface/rtc_stats_report.dart'; +export 'src/interface/rtc_track_event.dart'; export 'src/media_devices.dart'; export 'src/media_recorder.dart'; export 'src/native/rtc_peerconnection_factory.dart' diff --git 
a/lib/src/interface/enums.dart b/lib/src/interface/enums.dart index 7bc1721d20..017345d606 100644 --- a/lib/src/interface/enums.dart +++ b/lib/src/interface/enums.dart @@ -25,6 +25,15 @@ enum RTCIceGatheringState { RTCIceGatheringStateComplete } +enum RTCPeerConnectionState { + RTCPeerConnectionStateClosed, + RTCPeerConnectionStateFailed, + RTCPeerConnectionStateDisconnected, + RTCPeerConnectionStateNew, + RTCPeerConnectionStateConnecting, + RTCPeerConnectionStateConnected +} + enum RTCIceConnectionState { RTCIceConnectionStateNew, RTCIceConnectionStateChecking, @@ -40,6 +49,44 @@ enum RTCVideoViewObjectFit { RTCVideoViewObjectFitContain, RTCVideoViewObjectFitCover, } +enum RTCRtpMediaType { + RTCRtpMediaTypeAudio, + RTCRtpMediaTypeVideo, + RTCRtpMediaTypeData, +} + +final typeRTCRtpMediaTypetoString = { + RTCRtpMediaType.RTCRtpMediaTypeAudio: 'audio', + RTCRtpMediaType.RTCRtpMediaTypeVideo: 'video', + RTCRtpMediaType.RTCRtpMediaTypeData: 'data', +}; + +final typeStringToRTCRtpMediaType = { + 'audio': RTCRtpMediaType.RTCRtpMediaTypeAudio, + 'video': RTCRtpMediaType.RTCRtpMediaTypeVideo, + 'data': RTCRtpMediaType.RTCRtpMediaTypeData, +}; + +enum TransceiverDirection { + SendRecv, + SendOnly, + RecvOnly, + Inactive, +} + +final typeStringToRtpTransceiverDirection = { + 'sendrecv': TransceiverDirection.SendRecv, + 'sendonly': TransceiverDirection.SendOnly, + 'recvonly': TransceiverDirection.RecvOnly, + 'inactive': TransceiverDirection.Inactive, +}; + +final typeRtpTransceiverDirectionToString = { + TransceiverDirection.SendRecv: 'sendrecv', + TransceiverDirection.SendOnly: 'sendonly', + TransceiverDirection.RecvOnly: 'recvonly', + TransceiverDirection.Inactive: 'inactive', +}; RTCIceConnectionState iceConnectionStateForString(String state) { switch (state) { @@ -106,3 +153,22 @@ RTCDataChannelState rtcDataChannelStateForString(String state) { } return RTCDataChannelState.RTCDataChannelClosed; } + +RTCPeerConnectionState peerConnectionStateForString(String state) { + 
switch (state) { + case 'new': + return RTCPeerConnectionState.RTCPeerConnectionStateNew; + case 'connecting': + return RTCPeerConnectionState.RTCPeerConnectionStateConnecting; + case 'connected': + return RTCPeerConnectionState.RTCPeerConnectionStateConnected; + case 'closed': + return RTCPeerConnectionState.RTCPeerConnectionStateClosed; + case 'disconnected': + return RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; + case 'failed': + return RTCPeerConnectionState.RTCPeerConnectionStateFailed; + } + + return RTCPeerConnectionState.RTCPeerConnectionStateClosed; +} diff --git a/lib/src/interface/media_stream.dart b/lib/src/interface/media_stream.dart index fc31c6c508..ecbaba708a 100644 --- a/lib/src/interface/media_stream.dart +++ b/lib/src/interface/media_stream.dart @@ -1,10 +1,16 @@ import 'media_stream_track.dart'; +typedef MediaTrackCallback = void Function(MediaStreamTrack track); + abstract class MediaStream { MediaStream(this._id, this._ownerTag); final String _id; final String _ownerTag; + MediaTrackCallback onAddTrack; + + MediaTrackCallback onRemoveTrack; + String get id => _id; String get ownerTag => _ownerTag; @@ -16,6 +22,8 @@ abstract class MediaStream { Future removeTrack(MediaStreamTrack track, {bool removeFromNative = true}); + List getTracks(); + List getAudioTracks(); List getVideoTracks(); diff --git a/lib/src/interface/media_stream_track.dart b/lib/src/interface/media_stream_track.dart index a7df880af7..17ed0076d1 100644 --- a/lib/src/interface/media_stream_track.dart +++ b/lib/src/interface/media_stream_track.dart @@ -1,6 +1,12 @@ +typedef StreamTrackCallback = Function(); + abstract class MediaStreamTrack { MediaStreamTrack(); + StreamTrackCallback onEnded; + + StreamTrackCallback onMute; + bool get enabled; set enabled(bool b); diff --git a/lib/src/interface/rtc_peerconnection.dart b/lib/src/interface/rtc_peerconnection.dart index 479bd2fc40..795e3ac2fb 100644 --- a/lib/src/interface/rtc_peerconnection.dart +++ 
b/lib/src/interface/rtc_peerconnection.dart @@ -4,10 +4,16 @@ import 'media_stream_track.dart'; import 'rtc_data_channel.dart'; import 'rtc_dtmf_sender.dart'; import 'rtc_ice_candidate.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; +import 'rtc_rtp_transceiver.dart'; import 'rtc_session_description.dart'; import 'rtc_stats_report.dart'; +import 'rtc_track_event.dart'; typedef SignalingStateCallback = void Function(RTCSignalingState state); +typedef PeerConnectionStateCallback = void Function( + RTCPeerConnectionState state); typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); @@ -20,15 +26,15 @@ typedef RemoveTrackCallback = void Function( typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); typedef RenegotiationNeededCallback = void Function(); +/// Unified-Plan +typedef UnifiedPlanTrackCallback = void Function(RTCTrackEvent event); + abstract class RTCPeerConnection { RTCPeerConnection(); - // RTCSignalingState _signalingState; - // RTCIceGatheringState _iceGatheringState; - // RTCIceConnectionState _iceConnectionState; - // public: delegate SignalingStateCallback onSignalingState; + PeerConnectionStateCallback onConnectionState; IceGatheringStateCallback onIceGatheringState; IceConnectionStateCallback onIceConnectionState; IceCandidateCallback onIceCandidate; @@ -39,12 +45,17 @@ abstract class RTCPeerConnection { RTCDataChannelCallback onDataChannel; RenegotiationNeededCallback onRenegotiationNeeded; + /// Unified-Plan + UnifiedPlanTrackCallback onTrack; + RTCSignalingState get signalingState; RTCIceGatheringState get iceGatheringState; RTCIceConnectionState get iceConnectionState; + RTCPeerConnectionState get connectionState; + Future dispose(); Map get getConfiguration; @@ -78,8 +89,27 @@ abstract class RTCPeerConnection { Future close(); - 
//'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } - void addTransceiver(String type, Map options); - RTCDTMFSender createDtmfSender(MediaStreamTrack track); + + /// Unified-Plan. + List get senders; + + List get receivers; + + List get transceivers; + + Future createSender(String kind, String streamId); + + Future addTrack(MediaStreamTrack track, + [List streams]); + + Future removeTrack(RTCRtpSender sender); + + Future closeSender(RTCRtpSender sender); + + /// 'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } + Future addTransceiver( + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}); } diff --git a/lib/src/interface/rtc_rtcp_parameters.dart b/lib/src/interface/rtc_rtcp_parameters.dart new file mode 100644 index 0000000000..5c2ee56f6d --- /dev/null +++ b/lib/src/interface/rtc_rtcp_parameters.dart @@ -0,0 +1,19 @@ +class RTCRTCPParameters { + RTCRTCPParameters(this.cname, this.reducedSize); + factory RTCRTCPParameters.fromMap(Map map) { + return RTCRTCPParameters(map['cname'], map['reducedSize']); + } + + /// The Canonical Name used by RTCP + String cname; + + /// Whether reduced size RTCP is configured or compound RTCP + bool reducedSize; + + Map toMap() { + return { + 'cname': cname, + 'reducedSize': reducedSize, + }; + } +} diff --git a/lib/src/interface/rtc_rtp_parameters.dart b/lib/src/interface/rtc_rtp_parameters.dart new file mode 100644 index 0000000000..075ac05d5a --- /dev/null +++ b/lib/src/interface/rtc_rtp_parameters.dart @@ -0,0 +1,203 @@ +import 'rtc_rtcp_parameters.dart'; + +class RTCRTPCodec { + RTCRTPCodec( + {this.payloadType, + this.name, + this.kind, + this.clockRate, + this.numChannels, + this.parameters}); + + factory RTCRTPCodec.fromMap(Map map) { + return RTCRTPCodec( + payloadType: map['payloadType'], + name: map['name'], + kind: map['kind'], + clockRate: map['clockRate'], + numChannels: map['numChannels'] ?? 
1, + parameters: map['parameters']); + } + // Payload type used to identify this codec in RTP packets. + int payloadType; + + /// Name used to identify the codec. Equivalent to MIME subtype. + String name; + + /// The media type of this codec. Equivalent to MIME top-level type. + String kind; + + /// Clock rate in Hertz. + int clockRate; + + /// The number of audio channels used. Set to null for video codecs. + int numChannels; + + /// The "format specific parameters" field from the "a=fmtp" line in the SDP + Map parameters; + + Map toMap() { + return { + 'payloadType': payloadType, + 'name': name, + 'kind': kind, + 'clockRate': clockRate, + 'numChannels': numChannels, + 'parameters': parameters, + }; + } +} + +class RTCRtpEncoding { + RTCRtpEncoding( + {this.rid, + this.active, + this.maxBitrateBps, + this.maxFramerate, + this.minBitrateBps, + this.numTemporalLayers, + this.scaleResolutionDownBy, + this.ssrc}); + + factory RTCRtpEncoding.fromMap(Map map) { + return RTCRtpEncoding( + rid: map['rid'], + active: map['active'], + maxBitrateBps: map['maxBitrateBps'], + maxFramerate: map['maxFramerate'], + minBitrateBps: map['minBitrateBps'], + numTemporalLayers: map['numTemporalLayers'], + scaleResolutionDownBy: map['scaleResolutionDownBy'], + ssrc: map['ssrc']); + } + + /// If non-null, this represents the RID that identifies this encoding layer. + /// RIDs are used to identify layers in simulcast. + String rid; + + /// Set to true to cause this encoding to be sent, and false for it not to + /// be sent. + bool active = true; + + /// If non-null, this represents the Transport Independent Application + /// Specific maximum bandwidth defined in RFC3890. If null, there is no + /// maximum bitrate. + int maxBitrateBps; + + /// The minimum bitrate in bps for video. + int minBitrateBps; + + /// The max framerate in fps for video. + int maxFramerate; + + /// The number of temporal layers for video. 
+ int numTemporalLayers = 1; + + /// If non-null, scale the width and height down by this factor for video. If null, + /// implementation default scaling factor will be used. + double scaleResolutionDownBy = 1.0; + + /// SSRC to be used by this encoding. + /// Can't be changed between getParameters/setParameters. + int ssrc; + + Map toMap() { + return { + if (rid != null) 'rid': rid, + if (active != null) 'active': active, + if (maxBitrateBps != null) 'maxBitrateBps': maxBitrateBps, + if (maxFramerate != null) 'maxFramerate': maxFramerate, + if (minBitrateBps != null) 'minBitrateBps': minBitrateBps, + if (numTemporalLayers != null) 'numTemporalLayers': numTemporalLayers, + if (scaleResolutionDownBy != null) + 'scaleResolutionDownBy': scaleResolutionDownBy, + if (ssrc != null) 'ssrc': ssrc, + }; + } +} + +class RTCHeaderExtension { + RTCHeaderExtension({this.uri, this.id, this.encrypted}); + factory RTCHeaderExtension.fromMap(Map map) { + return RTCHeaderExtension( + uri: map['uri'], id: map['id'], encrypted: map['encrypted']); + } + + /// The URI of the RTP header extension, as defined in RFC5285. + String uri; + + /// The value put in the RTP packet to identify the header extension. + int id; + + /// Whether the header extension is encrypted or not. 
+ bool encrypted; + + Map toMap() { + return { + 'uri': uri, + 'id': id, + 'encrypted': encrypted, + }; + } +} + +class RTCRtpParameters { + RTCRtpParameters(this.transactionId, this.rtcp, this.headerExtensions, + this.encodings, this.codecs); + + factory RTCRtpParameters.fromMap(Map map) { + var encodings = []; + dynamic encodingsMap = map['encodings']; + encodingsMap.forEach((params) { + encodings.add(RTCRtpEncoding.fromMap(params)); + }); + var headerExtensions = []; + dynamic headerExtensionsMap = map['headerExtensions']; + headerExtensionsMap.forEach((params) { + headerExtensions.add(RTCHeaderExtension.fromMap(params)); + }); + var codecs = []; + dynamic codecsMap = map['codecs']; + codecsMap.forEach((params) { + codecs.add(RTCRTPCodec.fromMap(params)); + }); + var rtcp = RTCRTCPParameters.fromMap(map['rtcp']); + return RTCRtpParameters( + map['transactionId'], rtcp, headerExtensions, encodings, codecs); + } + + String transactionId; + + RTCRTCPParameters rtcp; + + List headerExtensions; + + List encodings; + + /// Codec parameters can't currently be changed between getParameters and + /// setParameters. Though in the future it will be possible to reorder them or + /// remove them. 
+ List codecs; + + Map toMap() { + var headerExtensionsList = []; + headerExtensions.forEach((params) { + headerExtensionsList.add(params.toMap()); + }); + var encodingList = []; + encodings.forEach((params) { + encodingList.add(params.toMap()); + }); + var codecsList = []; + codecs.forEach((params) { + codecsList.add(params.toMap()); + }); + return { + 'transactionId': transactionId, + 'rtcp': rtcp.toMap(), + 'headerExtensions': headerExtensionsList, + 'encodings': encodingList, + 'codecs': codecsList, + }; + } +} diff --git a/lib/src/interface/rtc_rtp_receiver.dart b/lib/src/interface/rtc_rtp_receiver.dart new file mode 100644 index 0000000000..b654a813de --- /dev/null +++ b/lib/src/interface/rtc_rtp_receiver.dart @@ -0,0 +1,26 @@ +import 'dart:async'; + +import 'enums.dart'; +import 'media_stream_track.dart'; +import 'rtc_rtp_parameters.dart'; + +typedef OnFirstPacketReceivedCallback = void Function( + RTCRtpReceiver rtpReceiver, RTCRtpMediaType mediaType); + +abstract class RTCRtpReceiver { + RTCRtpReceiver(); + + /// public: + OnFirstPacketReceivedCallback onFirstPacketReceived; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
+ RTCRtpParameters get parameters; + + MediaStreamTrack get track; + + String get receiverId; + + Future dispose(); +} diff --git a/lib/src/interface/rtc_rtp_sender.dart b/lib/src/interface/rtc_rtp_sender.dart new file mode 100644 index 0000000000..02af683f15 --- /dev/null +++ b/lib/src/interface/rtc_rtp_sender.dart @@ -0,0 +1,30 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; + +import 'media_stream_track.dart'; +import 'rtc_dtmf_sender.dart'; +import 'rtc_rtp_parameters.dart'; + +abstract class RTCRtpSender { + RTCRtpSender(); + + Future setParameters(RTCRtpParameters parameters); + + Future replaceTrack(MediaStreamTrack track); + + Future setTrack(MediaStreamTrack track, {bool takeOwnership = true}); + + RTCRtpParameters get parameters; + + MediaStreamTrack get track; + + String get senderId; + + bool get ownsTrack; + + RTCDTMFSender get dtmfSender; + + @mustCallSuper + Future dispose(); +} diff --git a/lib/src/interface/rtc_rtp_transceiver.dart b/lib/src/interface/rtc_rtp_transceiver.dart new file mode 100644 index 0000000000..5f8ed150be --- /dev/null +++ b/lib/src/interface/rtc_rtp_transceiver.dart @@ -0,0 +1,44 @@ +import 'dart:async'; + +import 'enums.dart'; +import 'media_stream.dart'; +import 'rtc_rtp_parameters.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; + +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + +class RTCRtpTransceiverInit { + RTCRtpTransceiverInit({ + this.direction, + this.streams, + this.sendEncodings, + }); + TransceiverDirection direction; + List streams; + List sendEncodings; +} + +abstract class RTCRtpTransceiver { + RTCRtpTransceiver(); + + TransceiverDirection get currentDirection; + + String get mid; + + RTCRtpSender get sender; + + RTCRtpReceiver get receiver; + + bool get stoped; + + String get transceiverId; + + Future setDirection(TransceiverDirection direction); + + Future getCurrentDirection(); + + Future stop(); +} diff --git 
a/lib/src/interface/rtc_track_event.dart b/lib/src/interface/rtc_track_event.dart new file mode 100644 index 0000000000..2f27f34c1c --- /dev/null +++ b/lib/src/interface/rtc_track_event.dart @@ -0,0 +1,12 @@ +import 'media_stream.dart'; +import 'media_stream_track.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_transceiver.dart'; + +class RTCTrackEvent { + RTCTrackEvent({this.receiver, this.streams, this.track, this.transceiver}); + final RTCRtpReceiver receiver; + final List streams; + final MediaStreamTrack track; + final RTCRtpTransceiver transceiver; +} diff --git a/lib/src/native/media_stream_impl.dart b/lib/src/native/media_stream_impl.dart index 994c953b07..968dd1e549 100644 --- a/lib/src/native/media_stream_impl.dart +++ b/lib/src/native/media_stream_impl.dart @@ -8,6 +8,12 @@ import 'utils.dart'; class MediaStreamNative extends MediaStream { MediaStreamNative(String streamId, String ownerTag) : super(streamId, ownerTag); + + factory MediaStreamNative.fromMap(Map map) { + return MediaStreamNative(map['streamId'], map['ownerTag']) + ..setMediaTracks(map['audioTracks'], map['videoTracks']); + } + final _channel = WebRTC.methodChannel(); final _audioTracks = []; @@ -27,6 +33,11 @@ class MediaStreamNative extends MediaStream { }); } + @override + List getTracks() { + return [..._audioTracks, ..._videoTracks]; + } + @override Future getMediaTracks() async { final response = await _channel.invokeMethod>( diff --git a/lib/src/native/media_stream_track_impl.dart b/lib/src/native/media_stream_track_impl.dart index 03dc74cd30..5f3d08012e 100644 --- a/lib/src/native/media_stream_track_impl.dart +++ b/lib/src/native/media_stream_track_impl.dart @@ -5,6 +5,10 @@ import 'utils.dart'; class MediaStreamTrackNative extends MediaStreamTrack { MediaStreamTrackNative(this._trackId, this._label, this._kind, this._enabled); + factory MediaStreamTrackNative.fromMap(Map map) { + return MediaStreamTrackNative( + map['id'], map['label'], map['kind'], map['enabled']); + } 
final _channel = WebRTC.methodChannel(); final String _trackId; final String _label; diff --git a/lib/src/native/rtc_dtmf_sender_impl.dart b/lib/src/native/rtc_dtmf_sender_impl.dart index 163713c305..7b92d0088d 100644 --- a/lib/src/native/rtc_dtmf_sender_impl.dart +++ b/lib/src/native/rtc_dtmf_sender_impl.dart @@ -2,9 +2,10 @@ import '../interface/rtc_dtmf_sender.dart'; import 'utils.dart'; class RTCDTMFSenderNative extends RTCDTMFSender { - RTCDTMFSenderNative(this._peerConnectionId); + RTCDTMFSenderNative(this._peerConnectionId, this._rtpSenderId); // peer connection Id must be defined as a variable where this function will be called. final String _peerConnectionId; + final String _rtpSenderId; final _channel = WebRTC.methodChannel(); @override @@ -12,6 +13,7 @@ class RTCDTMFSenderNative extends RTCDTMFSender { {int duration = 100, int interToneGap = 70}) async { await _channel.invokeMethod('sendDtmf', { 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId, 'tone': tones, 'duration': duration, 'gap': interToneGap, diff --git a/lib/src/native/rtc_peerconnection_impl.dart b/lib/src/native/rtc_peerconnection_impl.dart index 100bd13af1..f518575fc3 100644 --- a/lib/src/native/rtc_peerconnection_impl.dart +++ b/lib/src/native/rtc_peerconnection_impl.dart @@ -9,12 +9,19 @@ import '../interface/rtc_data_channel.dart'; import '../interface/rtc_dtmf_sender.dart'; import '../interface/rtc_ice_candidate.dart'; import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_sender.dart'; +import '../interface/rtc_rtp_transceiver.dart'; import '../interface/rtc_session_description.dart'; import '../interface/rtc_stats_report.dart'; +import '../interface/rtc_track_event.dart'; import 'media_stream_impl.dart'; import 'media_stream_track_impl.dart'; import 'rtc_data_channel_impl.dart'; import 'rtc_dtmf_sender_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 
'rtc_rtp_transceiver_impl.dart'; import 'utils.dart'; /* @@ -33,11 +40,15 @@ class RTCPeerConnectionNative extends RTCPeerConnection { StreamSubscription _eventSubscription; final _localStreams = []; final _remoteStreams = []; + final List _senders = []; + final List _receivers = []; + final List _transceivers = []; RTCDataChannelNative _dataChannel; Map _configuration; RTCSignalingState _signalingState; RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; + RTCPeerConnectionState _connectionState; final Map defaultSdpConstraints = { 'mandatory': { @@ -56,6 +67,13 @@ class RTCPeerConnectionNative extends RTCPeerConnection { @override RTCIceConnectionState get iceConnectionState => _iceConnectionState; + @override + RTCPeerConnectionState get connectionState => _connectionState; + + Future get localDescription => getLocalDescription(); + + Future get remoteDescription => getRemoteDescription(); + /* * PeerConnection event listener. */ @@ -67,6 +85,10 @@ class RTCPeerConnectionNative extends RTCPeerConnection { _signalingState = signalingStateForString(map['state']); onSignalingState?.call(_signalingState); break; + case 'peerConnectionState': + _connectionState = peerConnectionStateForString(map['state']); + onConnectionState?.call(_connectionState); + break; case 'iceGatheringState': _iceGatheringState = iceGatheringStateforString(map['state']); onIceGatheringState?.call(_iceGatheringState); @@ -151,6 +173,43 @@ class RTCPeerConnectionNative extends RTCPeerConnection { case 'onRenegotiationNeeded': onRenegotiationNeeded?.call(); break; + + /// Unified-Plan + case 'onTrack': + var params = map['streams'] as List; + var streams = params.map((e) => MediaStreamNative.fromMap(e)).toList(); + onTrack?.call(RTCTrackEvent( + receiver: RTCRtpReceiverNative.fromMap(map['receiver']), + streams: streams, + track: MediaStreamTrackNative.fromMap(map['track']), + )); + break; + + /// Other + case 'onSelectedCandidatePairChanged': + + /// class 
RTCIceCandidatePair { + /// RTCIceCandidatePair(this.local, this.remote, this.lastReceivedMs, this.reason); + /// factory RTCIceCandidatePair.fromMap(Map map) { + /// return RTCIceCandidatePair( + /// RTCIceCandidate.fromMap(map['local']), + /// RTCIceCandidate.fromMap(map['remote']), + /// map['lastReceivedMs'], + /// map['reason']); + /// } + /// RTCIceCandidate local; + /// RTCIceCandidate remote; + /// int lastReceivedMs; + /// String reason; + /// } + /// + /// typedef SelectedCandidatePairChangedCallback = void Function(RTCIceCandidatePair pair); + /// SelectedCandidatePairChangedCallback onSelectedCandidatePairChanged; + /// + /// RTCIceCandidatePair iceCandidatePair = RTCIceCandidatePair.fromMap(map); + /// onSelectedCandidatePairChanged?.call(iceCandidatePair); + + break; } } @@ -357,7 +416,7 @@ class RTCPeerConnectionNative extends RTCPeerConnection { @override RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - return RTCDTMFSenderNative(_peerConnectionId); + return RTCDTMFSenderNative(_peerConnectionId, ''); } @override @@ -371,9 +430,107 @@ class RTCPeerConnectionNative extends RTCPeerConnection { } } + /// Unified-Plan. 
+ @override + List get senders => _senders; + + @override + List get receivers => _receivers; + + @override + List get transceivers => _transceivers; + + @override + Future createSender(String kind, String streamId) async { + try { + final response = await _channel.invokeMethod( + 'createSender', { + 'peerConnectionId': _peerConnectionId, + 'kind': kind, + 'streamId': streamId + }); + var sender = RTCRtpSenderNative.fromMap(response); + _senders.add(sender); + return sender; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createSender: ${e.message}'; + } + } + @override - void addTransceiver(String type, Map options) { - throw UnimplementedError( - 'addTransceiver(String, Map) is not implemented for the native'); + Future addTrack(MediaStreamTrack track, + [List streams]) async { + try { + final response = + await _channel.invokeMethod('addTrack', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id, + 'streamIds': streams.map((e) => e.id).toList() + }); + var sender = RTCRtpSenderNative.fromMap(response); + _senders.add(sender); + return sender; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + @override + Future removeTrack(RTCRtpSender sender) async { + try { + final response = await _channel.invokeMethod( + 'removeTrack', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + _senders.removeWhere((item) { + return sender.senderId == item.senderId; + }); + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::removeTrack: ${e.message}'; + } + } + + @override + Future closeSender(RTCRtpSender sender) async { + try { + final response = await _channel.invokeMethod( + 'closeSender', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + _senders.removeWhere((item) { + return sender.senderId == item.senderId; + 
}); + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::closeSender: ${e.message}'; + } + } + + @override + Future addTransceiver( + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}) async { + try { + final response = + await _channel.invokeMethod('addTransceiver', { + 'peerConnectionId': _peerConnectionId, + if (track != null) 'trackId': track.id, + if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], + if (init != null) + 'transceiverInit': RTCRtpTransceiverInitNative.initToMap(init) + }); + var transceiver = RTCRtpTransceiverNative.fromMap(response, + peerConnectionId: _peerConnectionId); + _transceivers.add(transceiver); + return transceiver; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; + } } } diff --git a/lib/src/native/rtc_rtp_receiver_impl.dart b/lib/src/native/rtc_rtp_receiver_impl.dart new file mode 100644 index 0000000000..e6f381cfce --- /dev/null +++ b/lib/src/native/rtc_rtp_receiver_impl.dart @@ -0,0 +1,36 @@ +import 'dart:async'; + +import '../interface/media_stream_track.dart'; +import '../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import 'media_stream_track_impl.dart'; + +class RTCRtpReceiverNative extends RTCRtpReceiver { + RTCRtpReceiverNative(this._id, this._track, this._parameters); + + factory RTCRtpReceiverNative.fromMap(Map map) { + var track = MediaStreamTrackNative.fromMap(map['track']); + var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); + return RTCRtpReceiverNative(map['receiverId'], track, parameters); + } + + /// private: + String _id; + MediaStreamTrack _track; + RTCRtpParameters _parameters; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
+ @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack get track => _track; + + @override + String get receiverId => _id; + + @override + Future dispose() async {} +} diff --git a/lib/src/native/rtc_rtp_sender_impl.dart b/lib/src/native/rtc_rtp_sender_impl.dart new file mode 100644 index 0000000000..48b053333f --- /dev/null +++ b/lib/src/native/rtc_rtp_sender_impl.dart @@ -0,0 +1,112 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; + +import '../interface/media_stream_track.dart'; +import '../interface/rtc_dtmf_sender.dart'; +import '../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_sender.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'utils.dart'; + +class RTCRtpSenderNative extends RTCRtpSender { + RTCRtpSenderNative(this._id, this._track, this._dtmf, this._parameters, + this._ownsTrack, this._peerConnectionId); + + factory RTCRtpSenderNative.fromMap(Map map, + {String peerConnectionId}) { + return RTCRtpSenderNative( + map['senderId'], + MediaStreamTrackNative.fromMap(map['track']), + RTCDTMFSenderNative(peerConnectionId, map['senderId']), + RTCRtpParameters.fromMap(map['rtpParameters']), + map['ownsTrack'], + peerConnectionId); + } + + final MethodChannel _channel = WebRTC.methodChannel(); + String _peerConnectionId; + String _id; + MediaStreamTrack _track; + RTCDTMFSender _dtmf; + RTCRtpParameters _parameters; + bool _ownsTrack = false; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + @override + Future setParameters(RTCRtpParameters parameters) async { + _parameters = parameters; + try { + final response = await _channel + .invokeMethod('rtpSenderSetParameters', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'parameters': parameters.toMap() + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to 
RTCRtpSender::setParameters: ${e.message}'; + } + } + + @override + Future replaceTrack(MediaStreamTrack track) async { + try { + await _channel.invokeMethod('rtpSenderReplaceTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track.id + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::replaceTrack: ${e.message}'; + } + } + + @override + Future setTrack(MediaStreamTrack track, + {bool takeOwnership = true}) async { + try { + await _channel.invokeMethod('rtpSenderSetTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track.id, + 'takeOwnership': takeOwnership, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setTrack: ${e.message}'; + } + } + + @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack get track => _track; + + @override + String get senderId => _id; + + @override + bool get ownsTrack => _ownsTrack; + + @override + RTCDTMFSender get dtmfSender => _dtmf; + + @override + @mustCallSuper + Future dispose() async { + try { + await _channel.invokeMethod('rtpSenderDispose', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::dispose: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_rtp_transceiver_impl.dart b/lib/src/native/rtc_rtp_transceiver_impl.dart new file mode 100644 index 0000000000..35600e718a --- /dev/null +++ b/lib/src/native/rtc_rtp_transceiver_impl.dart @@ -0,0 +1,146 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import '../interface/enums.dart'; +import '../interface/media_stream.dart'; +import '../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_sender.dart'; +import '../interface/rtc_rtp_transceiver.dart'; +import 'media_stream_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import
'rtc_rtp_sender_impl.dart'; +import 'utils.dart'; + +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + +class RTCRtpTransceiverInitNative extends RTCRtpTransceiverInit { + RTCRtpTransceiverInitNative(TransceiverDirection direction, + List streams, List sendEncodings) + : super( + direction: direction, + streams: streams, + sendEncodings: sendEncodings); + + factory RTCRtpTransceiverInitNative.fromMap(Map map) { + return RTCRtpTransceiverInitNative( + typeStringToRtpTransceiverDirection[map['direction']], + (map['streams'] as List) + .map((e) => MediaStreamNative.fromMap(e)) + .toList(), + listToRtpEncodings(map['sendEncodings'])); + } + + @override + Map toMap() { + return { + 'direction': typeRtpTransceiverDirectionToString[direction], + if (streams != null) 'streamIds': streams.map((e) => e.id).toList(), + if (sendEncodings != null) + 'sendEncodings': sendEncodings.map((e) => e.toMap()).toList(), + }; + } + + static Map initToMap(RTCRtpTransceiverInit init) { + return { + 'direction': typeRtpTransceiverDirectionToString[init.direction], + if (init.streams != null) + 'streamIds': init.streams.map((e) => e.id).toList(), + if (init.sendEncodings != null) + 'sendEncodings': init.sendEncodings.map((e) => e.toMap()).toList(), + }; + } +} + +class RTCRtpTransceiverNative extends RTCRtpTransceiver { + RTCRtpTransceiverNative(this._id, this._direction, this._mid, this._sender, + this._receiver, this._peerConnectionId); + + factory RTCRtpTransceiverNative.fromMap(Map map, + {String peerConnectionId}) { + var transceiver = RTCRtpTransceiverNative( + map['transceiverId'], + typeStringToRtpTransceiverDirection[map['direction']], + map['mid'], + RTCRtpSenderNative.fromMap(map['sender'], + peerConnectionId: peerConnectionId), + RTCRtpReceiverNative.fromMap(map['receiver']), + peerConnectionId); + return transceiver; + } + + final MethodChannel _channel = WebRTC.methodChannel(); + String _peerConnectionId; + String _id; + bool
_stop; + TransceiverDirection _direction; + String _mid; + RTCRtpSender _sender; + RTCRtpReceiver _receiver; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + @override + TransceiverDirection get currentDirection => _direction; + + @override + String get mid => _mid; + + @override + RTCRtpSender get sender => _sender; + + @override + RTCRtpReceiver get receiver => _receiver; + + @override + bool get stoped => _stop; + + @override + String get transceiverId => _id; + + @override + Future setDirection(TransceiverDirection direction) async { + try { + await _channel + .invokeMethod('rtpTransceiverSetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'direction': typeRtpTransceiverDirectionToString[direction] + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setDirection: ${e.message}'; + } + } + + @override + Future getCurrentDirection() async { + try { + final response = await _channel.invokeMethod( + 'rtpTransceiverGetCurrentDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + _direction = typeStringToRtpTransceiverDirection[response['result']]; + return _direction; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getCurrentDirection: ${e.message}'; + } + } + + @override + Future stop() async { + try { + await _channel.invokeMethod('rtpTransceiverStop', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::stop: ${e.message}'; + } + } +} diff --git a/lib/src/native/rtc_track_event_impl.dart b/lib/src/native/rtc_track_event_impl.dart new file mode 100644 index 0000000000..d35a0a2b2e --- /dev/null +++ b/lib/src/native/rtc_track_event_impl.dart @@ -0,0 +1,32 @@ +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import 
'../interface/rtc_rtp_transceiver.dart'; +import '../interface/rtc_track_event.dart'; +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; + +class RTCTrackEventNative extends RTCTrackEvent { + RTCTrackEventNative(RTCRtpReceiver receiver, List streams, + MediaStreamTrack track, RTCRtpTransceiver transceiver) + : super( + receiver: receiver, + streams: streams, + track: track, + transceiver: transceiver); + + factory RTCTrackEventNative.fromMap( + Map map, String peerConnectionId) { + var streamsParams = map['streams'] as List>; + var streams = + streamsParams.map((e) => MediaStreamNative.fromMap(e)).toList(); + return RTCTrackEventNative( + RTCRtpReceiverNative.fromMap(map['receiver']), + streams, + MediaStreamTrackNative.fromMap(map['track']), + RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: peerConnectionId)); + } +} diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart index 12fbf07927..3e0269f006 100644 --- a/lib/src/web/factory_impl.dart +++ b/lib/src/web/factory_impl.dart @@ -2,9 +2,6 @@ import 'dart:async'; import 'dart:convert'; import 'dart:html' as html; -import 'package:flutter_webrtc/src/interface/rtc_dtmf_sender.dart'; -import 'package:flutter_webrtc/src/web/rtc_dtmf_sender_impl.dart'; - import '../interface/factory.dart'; import '../interface/media_recorder.dart'; import '../interface/media_stream.dart'; diff --git a/lib/src/web/media_stream_impl.dart b/lib/src/web/media_stream_impl.dart index 59cd1dc4b5..8a84282738 100644 --- a/lib/src/web/media_stream_impl.dart +++ b/lib/src/web/media_stream_impl.dart @@ -50,4 +50,9 @@ class MediaStreamWeb extends MediaStream { jsStream.getVideoTracks().forEach((track) => track.stop()); return super.dispose(); } + + @override + List getTracks() { + return [...getAudioTracks(), ...getVideoTracks()]; + } } diff --git a/lib/src/web/rtc_peerconnection_impl.dart 
b/lib/src/web/rtc_peerconnection_impl.dart index 795902e062..db57c3278e 100644 --- a/lib/src/web/rtc_peerconnection_impl.dart +++ b/lib/src/web/rtc_peerconnection_impl.dart @@ -10,6 +10,9 @@ import '../interface/rtc_data_channel.dart'; import '../interface/rtc_dtmf_sender.dart'; import '../interface/rtc_ice_candidate.dart'; import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_sender.dart'; +import '../interface/rtc_rtp_transceiver.dart'; import '../interface/rtc_session_description.dart'; import '../interface/rtc_stats_report.dart'; import 'media_stream_impl.dart'; @@ -80,6 +83,12 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { onSignalingState?.call(_signalingState); }); + js.JsObject.fromBrowserObject(_jsPc)['connectionstatechange'] = + js.JsFunction.withThis((_, state) { + _connectionState = peerConnectionStateForString(state); + onConnectionState?.call(_connectionState); + }); + js.JsObject.fromBrowserObject(_jsPc)['negotiationneeded'] = js.JsFunction.withThis(() { onRenegotiationNeeded?.call(); @@ -101,6 +110,7 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { RTCSignalingState _signalingState; RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; + RTCPeerConnectionState _connectionState; @override RTCSignalingState get signalingState => _signalingState; @@ -111,6 +121,9 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { @override RTCIceConnectionState get iceConnectionState => _iceConnectionState; + @override + RTCPeerConnectionState get connectionState => _connectionState; + @override Future dispose() { _jsPc.close(); @@ -226,15 +239,6 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { return Future.value(); } - //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } - @override - void addTransceiver(String type, Map options) { - if (jsutil.hasProperty(_jsPc, 'addTransceiver')) { - final jsOptions = 
js.JsObject.jsify(options); - jsutil.callMethod(_jsPc, 'addTransceiver', [type, jsOptions]); - } - } - @override RTCDTMFSender createDtmfSender(MediaStreamTrack track) { var _native = track as MediaStreamTrackWeb; @@ -257,4 +261,63 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { RTCSessionDescription _sessionFromJs(html.RtcSessionDescription sd) => RTCSessionDescription(sd.sdp, sd.type); + + /* + //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } + @override + void addTransceiver(String type, Map options) { + if (jsutil.hasProperty(_jsPc, 'addTransceiver')) { + final jsOptions = js.JsObject.jsify(options); + jsutil.callMethod(_jsPc, 'addTransceiver', [type, jsOptions]); + } + } + */ + @override + Future addTrack(MediaStreamTrack track, + [List streams]) { + var _track = track as MediaStreamTrackWeb; + var _stream = streams[0] as MediaStreamWeb; + _jsPc.addTrack(_track.jsTrack, _stream.jsStream); + // TODO: implement addTrack + throw UnimplementedError(); + } + + @override + Future closeSender(RTCRtpSender sender) { + // TODO: implement closeSender + throw UnimplementedError(); + } + + @override + Future createSender(String kind, String streamId) { + // TODO: implement createSender + throw UnimplementedError(); + } + + @override + // TODO: implement receivers + List get receivers => throw UnimplementedError(); + + @override + Future removeTrack(RTCRtpSender sender) { + // TODO: implement removeTrack + throw UnimplementedError(); + } + + @override + // TODO: implement senders + List get senders => throw UnimplementedError(); + + @override + // TODO: implement transceivers + List get transceivers => throw UnimplementedError(); + + @override + Future addTransceiver( + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}) { + // TODO: implement addTransceiver + throw UnimplementedError(); + } } diff --git a/macos/Classes/FlutterRPScreenRecorder.h b/macos/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 
0000000000..a34a3193c9 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.m b/macos/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index 
0f1295817b..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection 
dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? 
- ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. -- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? 
data : [NSNull null])}); - } -} - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 4b1e4d7a9f..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 4c89d41e78..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,81 +0,0 @@ -#import -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import CoreVideo; - -@implementation FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - 
CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; -#if 0 //TODO: frame capture - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - break; - case RTCVideoRotation_180: - orientation = UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } -#endif - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 255a472380..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,20 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - 
result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 5b52c9b65b..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,557 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = 
- [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). 
- NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. 
In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. - if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. 
- if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. 
- self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. 
- */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. - dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." 
- errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - /* TODO: scree capture - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - */ - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": 
device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = 
CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m new file mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - 
peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 6517f85ffe..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,504 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - 
-- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - 
peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", 
error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case 
RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; - case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. 
- */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. - */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream 
*)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - 
peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] 
- }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index 7fdc81b253..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link 
RTCVideoTrack}, if any, which this instance renders. - */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 928051960d..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,294 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import -#include "libyuv.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - 
_textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer:(size_t)width height:(size_t)height { - if(_pixelBufferRef != nil) { - RTCCVPixelBuffer *rtcPixelbuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:_pixelBufferRef]; - CVPixelBufferRef outbuffer; - CVPixelBufferCreate(kCFAllocatorDefault, - width, height, - kCVPixelFormatType_32BGRA, - nil, &outbuffer); - - [rtcPixelbuffer cropAndScaleTo:outbuffer withTempBuffer:CVPixelBufferGetBaseAddress(outbuffer)]; - return outbuffer; - } - return nil; -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - 
(uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - 
i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. 
- */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId{ - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId peerConnectionId:peerConnectionId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video track for RTCMediaStream: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 993169ea64..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,23 +0,0 @@ -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index bfbe4535aa..0000000000 --- 
a/macos/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,721 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - } - - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = [NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = 
argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = 
self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection 
&& stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = 
argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" 
isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self 
dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) { - 
[peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *peerConnectionId = argsMap[@"peerConnectionId"]; - if(render){ - [self setStreamId:streamId view:render peerConnectionId:peerConnectionId]; - } - result(nil); - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack 
class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; -#if 0 - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:enable.boolValue ? 
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; -#endif - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"ownerTag"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString 
stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in 
_peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - - return track; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - 
config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if ([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = 
RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if (json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m new file mode 
120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.swift b/macos/Classes/FlutterWebRTCPlugin.swift deleted file mode 100644 index 9521120a03..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.swift +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import FlutterMacOS -import Foundation - -public class FLEFlutterWebRTCPlugin: NSObject, FlutterPlugin { - public static func register(with registrar: FlutterPluginRegistrar) { - FlutterWebRTCPlugin.register(with: registrar) - } -}