From 41291c655e3838401d4960a48d6893df90e8801a Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 28 Sep 2020 20:10:00 +0800 Subject: [PATCH 01/26] Add API and files for unified-plan. --- lib/src/media_stream_track.dart | 5 + lib/src/rtc_peerconnection.dart | 132 +++++++++++++++++++++ lib/src/rtc_rtcp_parameters.dart | 19 +++ lib/src/rtc_rtp_parameters.dart | 191 +++++++++++++++++++++++++++++++ lib/src/rtc_rtp_receiver.dart | 79 +++++++++++++ lib/src/rtc_rtp_sender.dart | 93 +++++++++++++++ lib/src/rtc_rtp_transceiver.dart | 127 ++++++++++++++++++++ 7 files changed, 646 insertions(+) create mode 100644 lib/src/rtc_rtcp_parameters.dart create mode 100644 lib/src/rtc_rtp_parameters.dart create mode 100644 lib/src/rtc_rtp_receiver.dart create mode 100644 lib/src/rtc_rtp_sender.dart create mode 100644 lib/src/rtc_rtp_transceiver.dart diff --git a/lib/src/media_stream_track.dart b/lib/src/media_stream_track.dart index fa41a9e258..a399c1d6e7 100644 --- a/lib/src/media_stream_track.dart +++ b/lib/src/media_stream_track.dart @@ -3,6 +3,11 @@ import 'utils.dart'; class MediaStreamTrack { MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled); + factory MediaStreamTrack.fromMap(Map map) { + return MediaStreamTrack( + map['id'], map['label'], map['kind'], map['enabled']); + } + final _channel = WebRTC.methodChannel(); final String _trackId; final String _label; diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 6c090a1a13..7520c22af4 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -8,6 +8,9 @@ import 'media_stream_track.dart'; import 'rtc_data_channel.dart'; import 'rtc_dtmf_sender.dart'; import 'rtc_ice_candidate.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; +import 'rtc_rtp_transceiver.dart'; import 'rtc_session_description.dart'; import 'rtc_stats_report.dart'; import 'utils.dart'; @@ -28,6 +31,11 @@ typedef RemoveTrackCallback = void Function( typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); typedef RenegotiationNeededCallback = void Function(); +/// Unified-Plan +typedef UnifiedPlanAddTrackCallback = void Function(RTCRtpReceiver receiver, + [List mediaStreams]); +typedef UnifiedPlanTrackCallback = void Function(RTCRtpTransceiver transceiver); + /* * PeerConnection */ @@ -44,6 +52,9 @@ class RTCPeerConnection { StreamSubscription _eventSubscription; final _localStreams = []; final _remoteStreams = []; + final List _senders = []; + final List _receivers = []; + final List _transceivers = []; RTCDataChannel _dataChannel; Map _configuration; RTCSignalingState _signalingState; @@ -61,6 +72,12 @@ class RTCPeerConnection { RTCDataChannelCallback onDataChannel; RenegotiationNeededCallback onRenegotiationNeeded; + /// Unified-Plan + // TODO(cloudwebrtc): for unified-plan. + UnifiedPlanAddTrackCallback onAddTrack2; + UnifiedPlanTrackCallback onTrack; + UnifiedPlanTrackCallback onRemoveTrack2; + final Map defaultSdpConstraints = { 'mandatory': { 'OfferToReceiveAudio': true, @@ -169,6 +186,11 @@ class RTCPeerConnection { case 'onRenegotiationNeeded': onRenegotiationNeeded?.call(); break; + + /// Unified-Plan + case 'onTrack': + onTrack?.call(RTCRtpTransceiver.fromMap(map['transceiver'])); + break; } } @@ -361,6 +383,116 @@ class RTCPeerConnection { return RTCDTMFSender(_peerConnectionId); } + /// Unified-Plan. 
+ List get senders => _senders; + + List get receivers => _receivers; + + List get transceivers => _transceivers; + + Future createSender(String kind, String streamId) async { + try { + final response = await _channel.invokeMethod( + 'createSender', { + 'peerConnectionId': _peerConnectionId, + 'kind': kind, + 'streamId': streamId + }); + var sender = RTCRtpSender.fromMap(response); + _senders.add(sender); + return sender; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::createSender: ${e.message}'; + } + } + + Future addTrack(MediaStreamTrack track, + [List streamIds]) async { + try { + final response = + await _channel.invokeMethod('addTrack', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id, + 'streamIds': streamIds + }); + var sender = RTCRtpSender.fromMap(response); + _senders.add(sender); + return sender; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTrack: ${e.message}'; + } + } + + Future removeTrack(RTCRtpSender sender) async { + try { + final response = await _channel.invokeMethod( + 'removeTrack', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + _senders.removeWhere((item) { + return sender.senderId == item.senderId; + }); + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::removeTrack: ${e.message}'; + } + } + + Future closeSender(RTCRtpSender sender) async { + try { + final response = await _channel.invokeMethod( + 'closeSender', { + 'peerConnectionId': _peerConnectionId, + 'senderId': sender.senderId + }); + bool result = response['result']; + _senders.removeWhere((item) { + return sender.senderId == item.senderId; + }); + return result; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::removeTrack: ${e.message}'; + } + } + + Future addTransceiver(MediaStreamTrack track, + [RTCRtpTransceiverInit init]) async { + try { + final response = + await _channel.invokeMethod('addTransceiver', { + 'peerConnectionId': _peerConnectionId, + 'trackId': track.id, + 'transceiverInit': init?.toMap() + }); + var transceiver = RTCRtpTransceiver.fromMap(response); + transceiver.peerConnectionId = _peerConnectionId; + _transceivers.add(transceiver); + return transceiver; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; + } + } + + Future addTransceiverOfType(RTCRtpMediaType mediaType, + [RTCRtpTransceiverInit init]) async { + try { + final response = + await _channel.invokeMethod('addTransceiverOfType', { + 'peerConnectionId': _peerConnectionId, + 'mediaType': typeRTCRtpMediaTypetoString[mediaType], + 'transceiverInit': init?.toMap() + }); + var transceiver = RTCRtpTransceiver.fromMap(response); + transceiver.peerConnectionId = _peerConnectionId; + _transceivers.add(transceiver); + return transceiver; + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; + } + } + Future close() async { try { await _channel.invokeMethod('peerConnectionClose', { diff --git a/lib/src/rtc_rtcp_parameters.dart b/lib/src/rtc_rtcp_parameters.dart new file mode 100644 index 0000000000..5c2ee56f6d --- /dev/null +++ b/lib/src/rtc_rtcp_parameters.dart @@ -0,0 +1,19 @@ +class RTCRTCPParameters { + RTCRTCPParameters(this.cname, this.reducedSize); + factory RTCRTCPParameters.fromMap(Map map) { + return RTCRTCPParameters(map['cname'], map['reducedSize']); + } + + /// The Canonical Name used by RTCP + String cname; + + 
/// Whether reduced size RTCP is configured or compound RTCP + bool reducedSize; + + Map toMap() { + return { + 'cname': cname, + 'reducedSize': reducedSize, + }; + } +} diff --git a/lib/src/rtc_rtp_parameters.dart b/lib/src/rtc_rtp_parameters.dart new file mode 100644 index 0000000000..ffc2f3d5aa --- /dev/null +++ b/lib/src/rtc_rtp_parameters.dart @@ -0,0 +1,191 @@ +import 'rtc_rtcp_parameters.dart'; + +class RTCRTPCodec { + RTCRTPCodec(this.payloadType, this.name, this.kind, this.clockRate, + this.numChannels, this.parameters); + + factory RTCRTPCodec.fromMap(Map map) { + return RTCRTPCodec(map['payloadType'], map['name'], map['kind'], + map['clockRate'], map['numChannels'], map['parameters']); + } + // Payload type used to identify this codec in RTP packets. + int payloadType; + + /// Name used to identify the codec. Equivalent to MIME subtype. + String name; + + /// The media type of this codec. Equivalent to MIME top-level type. + String kind; + + /// Clock rate in Hertz. + int clockRate; + + /// The number of audio channels used. Set to null for video codecs. + int numChannels; + + /// The "format specific parameters" field from the "a=fmtp" line in the SDP + Map parameters; + + Map toMap() { + return { + 'payloadType': payloadType, + 'name': name, + 'kind': kind, + 'clockRate': clockRate, + 'numChannels': numChannels, + 'parameters': parameters, + }; + } +} + +class RTCRtpEncoding { + RTCRtpEncoding( + this.rid, + this.active, + this.maxBitrateBps, + this.maxFramerate, + this.minBitrateBps, + this.numTemporalLayers, + this.scaleResolutionDownBy, + this.ssrc); + + factory RTCRtpEncoding.fromMap(Map map) { + return RTCRtpEncoding( + map['rid'], + map['active'], + map['maxBitrateBps'], + map['maxFramerate'], + map['minBitrateBps'], + map['numTemporalLayers'], + map['scaleResolutionDownBy'], + map['ssrc']); + } + + /// If non-null, this represents the RID that identifies this encoding layer. + /// RIDs are used to identify layers in simulcast. + String rid; + + /// Set to true to cause this encoding to be sent, and false for it not to + /// be sent. + bool active = true; + + /// If non-null, this represents the Transport Independent Application + /// Specific maximum bandwidth defined in RFC3890. If null, there is no + /// maximum bitrate. + int maxBitrateBps; + + /// The minimum bitrate in bps for video. + int minBitrateBps; + + /// The max framerate in fps for video. + int maxFramerate; + + /// The number of temporal layers for video. + int numTemporalLayers; + + /// If non-null, scale the width and height down by this factor for video. If null, + /// implementation default scaling factor will be used. + double scaleResolutionDownBy; + + /// SSRC to be used by this encoding. + /// Can't be changed between getParameters/setParameters. + int ssrc; + + Map toMap() { + return { + 'rid': rid, + 'active': active, + 'maxBitrateBps': maxBitrateBps, + 'maxFramerate': maxFramerate, + 'minBitrateBps': minBitrateBps, + 'numTemporalLayers': numTemporalLayers, + 'scaleResolutionDownBy': scaleResolutionDownBy, + 'ssrc': ssrc, + }; + } +} + +class RTCHeaderExtension { + RTCHeaderExtension(this.uri, this.id, this.encrypted); + factory RTCHeaderExtension.fromMap(Map map) { + return RTCHeaderExtension(map['uri'], map['id'], map['encrypted']); + } + + /// The URI of the RTP header extension, as defined in RFC5285. + String uri; + + /// The value put in the RTP packet to identify the header extension. + int id; + + /// Whether the header extension is encrypted or not. 
+ bool encrypted; + + Map toMap() { + return { + 'uri': uri, + 'id': id, + 'encrypted': encrypted, + }; + } +} + +class RTCRtpParameters { + RTCRtpParameters(this.transactionId, this.rtcp, this.headerExtensions, + this.encodings, this.codecs); + + factory RTCRtpParameters.fromMap(Map map) { + var encodings = []; + dynamic encodingsMap = map['encodings']; + encodingsMap.forEach((params) { + encodings.add(RTCRtpEncoding.fromMap(params)); + }); + var headerExtensions = []; + dynamic headerExtensionsMap = map['headerExtensions']; + headerExtensionsMap.forEach((params) { + headerExtensions.add(RTCHeaderExtension.fromMap(params)); + }); + var codecs = []; + dynamic codecsMap = map['codecs']; + codecsMap.forEach((params) { + codecs.add(RTCRTPCodec.fromMap(params)); + }); + var rtcp = RTCRTCPParameters.fromMap(map['rtcp']); + return RTCRtpParameters( + map['transactionId'], rtcp, headerExtensions, encodings, codecs); + } + + String transactionId; + + RTCRTCPParameters rtcp; + + List headerExtensions; + + List encodings; + + /// Codec parameters can't currently be changed between getParameters and + /// setParameters. Though in the future it will be possible to reorder them or + /// remove them. + List codecs; + + Map toMap() { + var headerExtensionsList = []; + headerExtensions.forEach((params) { + headerExtensionsList.add(params.toMap()); + }); + var encodingList = []; + encodings.forEach((params) { + encodingList.add(params.toMap()); + }); + var codecsList = []; + codecs.forEach((params) { + codecsList.add(params.toMap()); + }); + return { + 'transactionId': transactionId, + 'rtcp': rtcp.toMap(), + 'headerExtensions': headerExtensionsList, + 'encodings': encodingList, + 'codecs': codecsList, + }; + } +} diff --git a/lib/src/rtc_rtp_receiver.dart b/lib/src/rtc_rtp_receiver.dart new file mode 100644 index 0000000000..0877d24598 --- /dev/null +++ b/lib/src/rtc_rtp_receiver.dart @@ -0,0 +1,79 @@ +import 'dart:async'; +import 'package:flutter/services.dart'; + +import 'media_stream_track.dart'; +import 'rtc_rtp_parameters.dart'; + +import 'utils.dart'; + +enum RTCRtpMediaType { + RTCRtpMediaTypeAudio, + RTCRtpMediaTypeVideo, + RTCRtpMediaTypeData, +} + +final typeRTCRtpMediaTypetoString = { + RTCRtpMediaType.RTCRtpMediaTypeAudio: 'audio', + RTCRtpMediaType.RTCRtpMediaTypeVideo: 'video', + RTCRtpMediaType.RTCRtpMediaTypeData: 'data', +}; + +final typeStringToRTCRtpMediaType = { + 'audio': RTCRtpMediaType.RTCRtpMediaTypeAudio, + 'video': RTCRtpMediaType.RTCRtpMediaTypeVideo, + 'data': RTCRtpMediaType.RTCRtpMediaTypeData, +}; + +typedef OnFirstPacketReceivedCallback = void Function( + RTCRtpReceiver rtpReceiver, RTCRtpMediaType mediaType); + +class RTCRtpReceiver { + RTCRtpReceiver(this._id, this._track, this._parameters); + + factory RTCRtpReceiver.fromMap(Map map) { + var track = MediaStreamTrack.fromMap(map['track']); + var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); + return RTCRtpReceiver(map['receiverId'], track, parameters); + } + + /// private: + final MethodChannel _channel = WebRTC.methodChannel(); + String _peerConnectionId; + String _id; + MediaStreamTrack _track; + RTCRtpParameters _parameters; + + /// public: + OnFirstPacketReceivedCallback onFirstPacketReceived; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + /// Currently, doesn't support changing any parameters, but may in the future. 
+ Future setParameters(RTCRtpParameters parameters) async { + _parameters = parameters; + try { + final response = await _channel + .invokeMethod('rtpReceiverSetParameters', { + 'peerConnectionId': _peerConnectionId, + 'rtpReceiverId': _id, + 'parameters': parameters.toMap() + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpReceiver::setParameters: ${e.message}'; + } + } + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. + RTCRtpParameters get parameters => _parameters; + + MediaStreamTrack get track => _track; + + String get receiverId => _id; + + Future dispose() async {} +} diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/rtc_rtp_sender.dart new file mode 100644 index 0000000000..9a74465eb6 --- /dev/null +++ b/lib/src/rtc_rtp_sender.dart @@ -0,0 +1,93 @@ +import 'dart:async'; +import 'package:flutter/services.dart'; + +import 'media_stream_track.dart'; +import 'rtc_dtmf_sender.dart'; +import 'rtc_rtp_parameters.dart'; +import 'utils.dart'; + +class RTCRtpSender { + RTCRtpSender( + this._id, this._track, this._dtmf, this._parameters, this._ownsTrack); + + factory RTCRtpSender.fromMap(Map map) { + return RTCRtpSender( + map['senderId'], + MediaStreamTrack.fromMap(map['track']), + RTCDTMFSender(map['peerConnectionId']), + RTCRtpParameters.fromMap(map['rtpParameters']), + map['ownsTrack']); + } + + final MethodChannel _channel = WebRTC.methodChannel(); + String _peerConnectionId; + String _id; + MediaStreamTrack _track; + RTCDTMFSender _dtmf; + RTCRtpParameters _parameters; + bool _ownsTrack = false; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + Future setParameters(RTCRtpParameters parameters) async { + _parameters = parameters; + try { + final response = await _channel + .invokeMethod('rtpSenderSetParameters', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'parameters': parameters.toMap() + }); + return response['result']; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setParameters: ${e.message}'; + } + } + + Future replaceTrack(MediaStreamTrack track) async { + try { + await _channel.invokeMethod('rtpSenderReplaceTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track.id + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::replaceTrack: ${e.message}'; + } + } + + Future setTrack(MediaStreamTrack track, bool takeOwnership) async { + try { + await _channel.invokeMethod('rtpSenderSetTrack', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _id, + 'trackId': track.id, + 'takeOwnership': takeOwnership, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setTrack: ${e.message}'; + } + } + + RTCRtpParameters get parameters => _parameters; + + MediaStreamTrack get track => _track; + + String get senderId => _id; + + bool get ownsTrack => _ownsTrack; + + RTCDTMFSender get dtmfSender => _dtmf; + + Future dispose() async { + try { + await _channel.invokeMethod('rtpSenderDispose', { + 'rtpSenderId': _id, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpSender::setTrack: ${e.message}'; + } + } +} diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/rtc_rtp_transceiver.dart new file mode 100644 index 0000000000..ea84df424c --- /dev/null +++ b/lib/src/rtc_rtp_transceiver.dart @@ -0,0 +1,127 @@ 
+import 'dart:async'; + +import 'package:flutter/services.dart'; + +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; +import 'utils.dart'; + +enum RTCRtpTransceiverDirection { + RTCRtpTransceiverDirectionSendRecv, + RTCRtpTransceiverDirectionSendOnly, + RTCRtpTransceiverDirectionRecvOnly, + RTCRtpTransceiverDirectionInactive, +} + +final typeStringToRtpTransceiverDirection = + { + 'sendrecv': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendRecv, + 'sendonly': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendOnly, + 'recvonly': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionRecvOnly, + 'inactive': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionInactive, +}; + +final typeRtpTransceiverDirectionToString = + { + RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendRecv: 'sendrecv', + RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendOnly: 'sendonly', + RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionRecvOnly: 'recvonly', + RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionInactive: 'inactive', +}; + +class RTCRtpTransceiverInit { + RTCRtpTransceiverInit(this.direction, this.streamIds); + + factory RTCRtpTransceiverInit.fromMap(Map map) { + return RTCRtpTransceiverInit( + typeStringToRtpTransceiverDirection[map['direction']], + map['streamIds']); + } + RTCRtpTransceiverDirection direction; + List streamIds; + + Map toMap() { + return { + 'direction': typeRtpTransceiverDirectionToString[direction], + 'streamIds': streamIds + }; + } +} + +class RTCRtpTransceiver { + RTCRtpTransceiver( + this._id, this._direction, this._mid, this._sender, this._receiver); + + factory RTCRtpTransceiver.fromMap(Map map) { + var transceiver = RTCRtpTransceiver( + map['transceiverId'], + typeStringToRtpTransceiverDirection[map['direction']], + map['mid'], + RTCRtpSender.fromMap(map['sender']), + RTCRtpReceiver.fromMap(map['receiver'])); + return transceiver; + } + + final MethodChannel _channel = WebRTC.methodChannel(); + String _peerConnectionId; + String _id; + bool _stop; + RTCRtpTransceiverDirection _direction; + String _mid; + RTCRtpSender _sender; + RTCRtpReceiver _receiver; + + set peerConnectionId(String id) { + _peerConnectionId = id; + } + + RTCRtpTransceiverDirection get currentDirection => _direction; + + String get mid => _mid; + + RTCRtpSender get sender => _sender; + + RTCRtpReceiver get receiver => _receiver; + + bool get stoped => _stop; + + String get transceiverId => _id; + + Future setDirection(RTCRtpTransceiverDirection direction) async { + try { + await _channel + .invokeMethod('rtpTransceiverSetDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id, + 'direction': typeRtpTransceiverDirectionToString[direction] + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::setDirection: ${e.message}'; + } + } + + Future getCurrentDirection() async { + try { + final response = await _channel.invokeMethod( + 'rtpTransceiverGetCurrentDirection', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + _direction = typeStringToRtpTransceiverDirection[response['result']]; + return _direction; + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::getCurrentDirection: ${e.message}'; + } + } + + Future stop() async { + try { + await _channel.invokeMethod('rtpTransceiverStop', { + 'peerConnectionId': _peerConnectionId, + 'transceiverId': _id + }); + } on PlatformException catch (e) { + throw 'Unable to RTCRtpTransceiver::stop: ${e.message}'; + } + } +} From 
e82f8f66d084246617760141d91fae8b6500f7ea Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 2 Oct 2020 21:21:48 +0800 Subject: [PATCH 02/26] Add more. --- .../webrtc/MethodCallHandlerImpl.java | 157 +++++++++++++ .../webrtc/PeerConnectionObserver.java | 219 ++++++++++++++++++ .../webrtc/utils/ConstraintsMap.java | 4 + 3 files changed, 380 insertions(+) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 21f8c91975..90e7fb1b21 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -558,6 +558,88 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } break; } + case "createSender": { + String peerConnectionId = call.argument("peerConnectionId"); + String kind = call.argument("kind"); + String streamId = call.argument("streamId"); + createSender(peerConnectionId, kind, streamId, result); + break; + } + case "closeSender": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + stopSender(peerConnectionId, senderId, result); + break; + } + case "addTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + List streamIds = call.argument("streamIds"); + addTrack(peerConnectionId, trackId, streamIds, result); + break; + } + case "removeTrack": { + String peerConnectionId = call.argument("peerConnectionId"); + String senderId = call.argument("senderId"); + removeTrack(peerConnectionId, senderId, result); + break; + } + case "addTransceiver": { + String peerConnectionId = call.argument("peerConnectionId"); + String trackId = call.argument("trackId"); + Map transceiverInit = call.argument("transceiverInit"); + addTransceiver(peerConnectionId, trackId, transceiverInit, result); + break; + } + case "addTransceiverOfType": { + String peerConnectionId = call.argument("peerConnectionId"); + String mediaType = call.argument("mediaType"); + Map transceiverInit = call.argument("transceiverInit"); + addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + break; + } + case "rtpTransceiverSetDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String direction = call.argument("direction"); + String transceiverId = call.argument("transceiverId"); + + result.notImplemented(); + break; + } + case "rtpTransceiverGetCurrentDirection": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + + result.notImplemented(); + break; + } + case "rtpTransceiverStop": { + String peerConnectionId = call.argument("peerConnectionId"); + String transceiverId = call.argument("transceiverId"); + + result.notImplemented(); + break; + } + case "rtpSenderSetParameters": { + result.notImplemented(); + break; + } + case "rtpSenderReplaceTrack": { + result.notImplemented(); + break; + } + case "rtpSenderSetTrack": { + result.notImplemented(); + break; + } + case "rtpSenderDispose": { + result.notImplemented(); + break; + } + case "rtpReceiverSetParameters": { + result.notImplemented(); + break; + } default: result.notImplemented(); break; @@ -1438,4 +1520,79 @@ public void dataChannelClose(String peerConnectionId, int dataChannelId) { public void setActivity(Activity activity) { this.activity = activity; } + + public void createSender(String peerConnectionId, 
String kind, String streamId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "createSender() peerConnection is null"); + result.error("createSender", "createSender() peerConnection is null", null); + } else { + pco.createSender(kind, streamId, result); + } + } + + + public void stopSender(String peerConnectionId, String senderId, Result result) { + PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "removeTrack() peerConnection is null"); + result.error("removeTrack", "removeTrack() peerConnection is null", null); + } else { + pco.closeSender(senderId, result); + } + } + + public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result){ + PeerConnectionObserver pco + = mPeerConnectionObservers.get(peerConnectionId); + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + result.error("addTrack", "addTrack() track is null", null); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "addTrack() peerConnection is null"); + result.error("addTrack", "addTrack() peerConnection is null", null); + } else { + pco.addTrack(track, streamIds, result); + } + } + + public void removeTrack(String peerConnectionId, String senderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "removeTrack() peerConnection is null"); + result.error("removeTrack", "removeTrack() peerConnection is null", null); + } else { + pco.removeTrack(senderId, result); + } + } + + public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + result.error("addTransceiver", "addTransceiver() track is null", null); + return; + } + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "addTransceiver() peerConnection is null"); + result.error("addTransceiver", "addTransceiver() peerConnection is null", null); + } else { + pco.addTransceiver(track, transceiverInit, result); + } + } + + public void addTransceiverOfType(String peerConnectionId, String mediaType, Map transceiverInit, + Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "addTransceiverOfType() peerConnection is null"); + result.error("addTransceiverOfType", "addTransceiverOfType() peerConnection is null", null); + } else { + pco.addTransceiverOfType(mediaType, transceiverInit, result); + } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 532718ac7b..3938204ecc 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -13,13 +13,18 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.List; import org.webrtc.AudioTrack; import org.webrtc.DataChannel; +import org.webrtc.DtmfSender; import org.webrtc.IceCandidate; import org.webrtc.MediaStream; import 
org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnection; +import org.webrtc.RtpParameters; import org.webrtc.RtpReceiver; +import org.webrtc.RtpSender; +import org.webrtc.RtpTransceiver; import org.webrtc.StatsObserver; import org.webrtc.StatsReport; import org.webrtc.VideoTrack; @@ -34,6 +39,9 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St private PeerConnection peerConnection; final Map remoteStreams = new HashMap<>(); final Map remoteTracks = new HashMap<>(); + final Map transceivers = new HashMap(); + final Map senders = new HashMap(); + final Map receivers = new HashMap(); private final StateProvider stateProvider; private final EventChannel eventChannel; @@ -468,4 +476,215 @@ private String signalingStateString(PeerConnection.SignalingState signalingState } return null; } + + @Nullable + private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection direction) { + switch (direction) { + case SEND_RECV: + return "sendrecv"; + case SEND_ONLY: + return "sendonly"; + case RECV_ONLY: + return "recvonly"; + case INACTIVE: + return "inactive"; + } + return null; + } + + private RtpTransceiver.RtpTransceiverDirection typStringToTransceiverDirection(String direction) { + switch (direction) { + case "sendrecv": + return RtpTransceiver.RtpTransceiverDirection.SEND_RECV; + case "sendonly": + return RtpTransceiver.RtpTransceiverDirection.SEND_ONLY; + case "recvonly": + return RtpTransceiver.RtpTransceiverDirection.RECV_ONLY; + case "inactive": + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + return RtpTransceiver.RtpTransceiverDirection.INACTIVE; + } + + private Map rtpParametersToMap(RtpParameters rtpParameters){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("transactionId", rtpParameters.transactionId); + + ConstraintsMap rtcp = new ConstraintsMap(); + rtcp.putString("cname", rtpParameters.getRtcp().getCname()); + rtcp.putBoolean("reducedSize", rtpParameters.getRtcp().getReducedSize()); + info.putMap("rtcp", rtcp.toMap()); + + ConstraintsArray headerExtensions = new ConstraintsArray(); + for(RtpParameters.HeaderExtension extension : rtpParameters.getHeaderExtensions()){ + ConstraintsMap map = new ConstraintsMap(); + map.putString("uri",extension.getUri()); + map.putInt("id", extension.getId()); + map.putBoolean("encrypted", extension.getEncrypted()); + headerExtensions.pushMap(map); + } + info.putArray("headerExtensions", headerExtensions.toArrayList()); + + ConstraintsArray encodings = new ConstraintsArray(); + for(RtpParameters.Encoding encoding : rtpParameters.encodings){ + ConstraintsMap map = new ConstraintsMap(); + map.putBoolean("active",encoding.active); + if (encoding.maxBitrateBps != null) { + map.putInt("maxBitrateBps", encoding.maxBitrateBps); + } + if (encoding.minBitrateBps != null) { + map.putInt("minBitrateBps", encoding.minBitrateBps); + } + if (encoding.maxFramerate != null) { + map.putInt("maxFramerate", encoding.maxFramerate); + } + if (encoding.numTemporalLayers != null) { + map.putInt("numTemporalLayers", encoding.numTemporalLayers); + } + if (encoding.scaleResolutionDownBy != null) { + map.putDouble("scaleResolutionDownBy", encoding.scaleResolutionDownBy); + } + map.putLong("ssrc", encoding.ssrc); + encodings.pushMap(map); + } + info.putArray("encodings", encodings.toArrayList()); + + ConstraintsArray codecs = new ConstraintsArray(); + for(RtpParameters.Codec codec : rtpParameters.codecs){ + ConstraintsMap map = new ConstraintsMap(); + map.putString("name",codec.name); + 
map.putInt("payloadType", codec.payloadType); + map.putInt("clockRate", codec.clockRate); + map.putInt("numChannels", codec.numChannels); + map.putMap("numTemporalLayers", new HashMap(codec.parameters)); + //map.putString("kind", codec.kind); + codecs.pushMap(map); + } + + info.putArray("codecs", codecs.toArrayList()); + return info.toMap(); + } + + @Nullable + private Map mediaTrackToMap(MediaStreamTrack track){ + ConstraintsMap info = new ConstraintsMap(); + if(track != null){ + info.putString("trackId", track.id()); + info.putString("label",track.id()); + info.putString("kind",track.kind()); + info.putBoolean("enabled", track.enabled()); + } + return info.toMap(); + } + + private Map dtmfSenderToMap(DtmfSender dtmfSender, String id){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("dtmfSenderId",id); + if (dtmfSender != null) { + info.putInt("interToneGap", dtmfSender.interToneGap()); + info.putInt("duration", dtmfSender.duration()); + } + return info.toMap(); + } + + private Map rtpSenderToMap(RtpSender sender){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("senderId", sender.id()); + info.putBoolean("ownsTrack", true); + info.putMap("dtmfSender", dtmfSenderToMap(sender.dtmf(), sender.id())); + info.putMap("rtpParameters", rtpParametersToMap(sender.getParameters())); + info.putMap("track", mediaTrackToMap(sender.track())); + return info.toMap(); + } + + private Map rtpReceiverToMap(RtpReceiver receiver){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("receiverId", receiver.id()); + info.putMap("rtpParameters", rtpParametersToMap(receiver.getParameters())); + info.putMap("track", mediaTrackToMap(receiver.track())); + return info.toMap(); + } + + Map transceiverToMap(RtpTransceiver transceiver){ + ConstraintsMap info = new ConstraintsMap(); + info.putString("transceiverId", transceiver.getMid()); + info.putString("mid", transceiver.getMid()); + info.putString("direction", transceiverDirectionString(transceiver.getDirection())); + info.putMap("sender", rtpSenderToMap(transceiver.getSender())); + info.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + return info.toMap(); + } + + @Override + public void onTrack(RtpTransceiver transceiver) { + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onTrack"); + params.putMap("transceiver", transceiverToMap(transceiver)); + sendEvent(params); + } + + public void createSender(String kind, String streamId, Result result){ + RtpSender sender = peerConnection.createSender(kind, streamId); + senders.put(sender.id(),sender); + result.success(rtpSenderToMap(sender)); + } + + public void closeSender(String senderId, Result result) { + RtpSender sender = senders.get(senderId); + sender.dispose(); + Map params = new HashMap<>(); + params.put("result", true); + result.success(params); + } + + public void addTrack(MediaStreamTrack track, List streamIds, Result result){ + RtpSender sender = peerConnection.addTrack(track, streamIds); + senders.put(sender.id(),sender); + result.success(rtpSenderToMap(sender)); + } + + public void removeTrack(String senderId, Result result){ + RtpSender sender = senders.get(senderId); + if(sender == null){ + result.error("removeTrack", "removeTrack() sender is null", null); + return; + } + boolean res = peerConnection.removeTrack(sender); + Map params = new HashMap<>(); + params.put("result", res); + result.success(params); + } + + public void addTransceiver(MediaStreamTrack track, Map transceiverInit, Result result) { + RtpTransceiver 
transceiver; + if(transceiverInit != null){ + List streamIds = (List)transceiverInit.get("streamIds"); + String direction = (String)transceiverInit.get("direction"); + RtpTransceiver.RtpTransceiverInit init = new RtpTransceiver.RtpTransceiverInit(typStringToTransceiverDirection(direction) ,streamIds); + transceiver = peerConnection.addTransceiver(track, init); + } else { + transceiver = peerConnection.addTransceiver(track); + } + transceivers.put(transceiver.getMid(), transceiver); + result.success(transceiverToMap(transceiver)); + } + + public void addTransceiverOfType(String mediaType, Map transceiverInit, Result result) { + MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if(mediaType == "audio") + type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + else if(mediaType == "video") + type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + RtpTransceiver transceiver; + if(transceiverInit != null){ + List streamIds = (List)transceiverInit.get("streamIds"); + String direction = (String)transceiverInit.get("direction"); + RtpTransceiver.RtpTransceiverInit init = new RtpTransceiver.RtpTransceiverInit(typStringToTransceiverDirection(direction) ,streamIds); + transceiver = peerConnection.addTransceiver(type, init); + } else { + transceiver = peerConnection.addTransceiver(type); + } + transceivers.put(transceiver.getMid(), transceiver); + result.success(transceiverToMap(transceiver)); + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java index eb16e20697..08291dcc50 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java @@ -89,6 +89,10 @@ public void putInt(String key, int value) { mMap.put(key, value); } + public void putLong(String key, long value) { + mMap.put(key, value); + } + public void putString(String key, String value) { mMap.put(key, value); } From 930a297e510b8d79f20fda2e82acf817c406e5fa Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 6 Oct 2020 21:58:31 +0800 Subject: [PATCH 03/26] Add more. 
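With the onTrack/onAddTrack2 events and the transceiver channel methods wired up below, the Dart API added in PATCH 01 becomes usable end to end. As a rough sketch of the intended call flow (not a definitive usage guide): createPeerConnection() and navigator.getUserMedia() are the plugin's existing helpers, unified-plan semantics and the package's public exports are assumed, and 'local-audio' is just an arbitrary stream id label.

  // Sketch only: names outside this patch series are assumptions.
  Future<void> startCall() async {
    final pc = await createPeerConnection(
        {'iceServers': [], 'sdpSemantics': 'unified-plan'}, {});

    // Send-only audio on its own transceiver; 'local-audio' groups the
    // track into a stream id on the remote side.
    final stream =
        await navigator.getUserMedia({'audio': true, 'video': false});
    await pc.addTransceiver(
        stream.getAudioTracks()[0],
        RTCRtpTransceiverInit(
            RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendOnly,
            ['local-audio']));

    // Receive-only video, negotiated without a local track.
    await pc.addTransceiverOfType(
        RTCRtpMediaType.RTCRtpMediaTypeVideo,
        RTCRtpTransceiverInit(
            RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionRecvOnly,
            []));

    // Remote media now surfaces through onTrack instead of onAddStream.
    pc.onTrack = (RTCRtpTransceiver transceiver) {
      print('onTrack: mid=${transceiver.mid}, '
          'track=${transceiver.receiver.track.id}');
    };

    final offer = await pc.createOffer({});
    await pc.setLocalDescription(offer);
    // ...exchange offer/answer over the application's signaling as before.
  }
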
--- .../webrtc/MethodCallHandlerImpl.java | 40 +++++--- .../webrtc/PeerConnectionObserver.java | 96 +++++++++++++++---- lib/src/media_stream.dart | 10 +- lib/src/rtc_peerconnection.dart | 10 ++ 4 files changed, 122 insertions(+), 34 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 90e7fb1b21..d05b103863 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -602,15 +602,13 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String peerConnectionId = call.argument("peerConnectionId"); String direction = call.argument("direction"); String transceiverId = call.argument("transceiverId"); - - result.notImplemented(); + rtpTransceiverSetDirection(peerConnectionId, direction, transceiverId, result); break; } case "rtpTransceiverGetCurrentDirection": { String peerConnectionId = call.argument("peerConnectionId"); String transceiverId = call.argument("transceiverId"); - - result.notImplemented(); + rtpTransceiverGetCurrentDirection(peerConnectionId, transceiverId, result); break; } case "rtpTransceiverStop": { @@ -1587,12 +1585,32 @@ public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit, Result result) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); - if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "addTransceiverOfType() peerConnection is null"); - result.error("addTransceiverOfType", "addTransceiverOfType() peerConnection is null", null); - } else { - pco.addTransceiverOfType(mediaType, transceiverInit, result); - } + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "addTransceiverOfType() peerConnection is null"); + result.error("addTransceiverOfType", "addTransceiverOfType() peerConnection is null", null); + } else { + pco.addTransceiverOfType(mediaType, transceiverInit, result); + } + } + + public void rtpTransceiverSetDirection(String peerConnectionId, String direction, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "rtpTransceiverSetDirection() peerConnection is null"); + result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() peerConnection is null", null); + } else { + pco.rtpTransceiverSetDirection(direction, transceiverId, result); + } + } + + public void rtpTransceiverGetCurrentDirection(String peerConnectionId, String transceiverId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + Log.d(TAG, "rtpTransceiverSetDirection() peerConnection is null"); + result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() peerConnection is null", null); + } else { + pco.rtpTransceiverGetCurrentDirection(transceiverId, result); + } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 3938204ecc..e96f661447 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ 
-275,6 +275,7 @@ public void onAddStream(MediaStream mediaStream) { ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onAddStream"); params.putString("streamId", streamId); + params.putString("ownerTag", id); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); @@ -343,26 +344,39 @@ public void onRemoveStream(MediaStream mediaStream) { @Override public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { - Log.d(TAG, "onAddTrack"); - for (MediaStream stream : mediaStreams) { - String streamId = stream.getId(); - MediaStreamTrack track = receiver.track(); + Log.d(TAG, "onAddTrack"); + // for plan-b + for (MediaStream stream : mediaStreams) { + String streamId = stream.getId(); + MediaStreamTrack track = receiver.track(); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onAddTrack"); + params.putString("streamId", streamId); + params.putString("ownerTag", id); + params.putString("trackId", track.id()); + + String trackId = track.id(); + ConstraintsMap trackInfo = new ConstraintsMap(); + trackInfo.putString("id", trackId); + trackInfo.putString("label", track.kind()); + trackInfo.putString("kind", track.kind()); + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", true); + params.putMap("track", trackInfo.toMap()); + sendEvent(params); + } + // For unified-plan ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onAddTrack"); - params.putString("streamId", streamId); - params.putString("trackId", track.id()); - - String trackId = track.id(); - ConstraintsMap trackInfo = new ConstraintsMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", track.kind()); - trackInfo.putString("kind", track.kind()); - trackInfo.putBoolean("enabled", track.enabled()); - trackInfo.putString("readyState", track.state().toString()); - trackInfo.putBoolean("remote", true); - params.putMap("track", trackInfo.toMap()); + ConstraintsArray streams = new ConstraintsArray(); + for(int i = 0; i< mediaStreams.length; i++){ + MediaStream stream = mediaStreams[i]; + streams.pushMap(new ConstraintsMap(mediaStreamToMap(stream))); + } + params.putArray("mediaStreams", streams.toArrayList()); + params.putMap("receiver", rtpReceiverToMap(receiver)); + params.putString("event", "onAddTrack2"); sendEvent(params); - } } @Override @@ -565,14 +579,39 @@ private Map rtpParametersToMap(RtpParameters rtpParameters){ return info.toMap(); } + @Nullable + private Map mediaStreamToMap(MediaStream stream){ + ConstraintsMap params = new ConstraintsMap(); + + params.putString("streamId", stream.getId()); + params.putString("ownerTag", id); + ConstraintsArray audioTracks = new ConstraintsArray(); + ConstraintsArray videoTracks = new ConstraintsArray(); + + for (int i = 0; i < stream.audioTracks.size(); i++) { + MediaStreamTrack track = stream.videoTracks.get(i); + audioTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } + for (int i = 0; i < stream.videoTracks.size(); i++) { + MediaStreamTrack track = stream.videoTracks.get(i); + videoTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); + } + + params.putArray("audioTracks", audioTracks.toArrayList()); + params.putArray("videoTracks", videoTracks.toArrayList()); + + return params.toMap(); + } + @Nullable private Map mediaTrackToMap(MediaStreamTrack track){ ConstraintsMap info = new ConstraintsMap(); if(track != null){ 
info.putString("trackId", track.id()); - info.putString("label",track.id()); + info.putString("label",track.getClass() == VideoTrack.class? "video": "audio"); info.putString("kind",track.kind()); info.putBoolean("enabled", track.enabled()); + info.putString("readyState", track.state().toString()); } return info.toMap(); } @@ -687,4 +726,23 @@ else if(mediaType == "video") transceivers.put(transceiver.getMid(), transceiver); result.success(transceiverToMap(transceiver)); } + + public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { + RtpTransceiver transceiver = transceivers.get(transceiverId); + if (transceiver == null) { + result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() transceiver is null", null); + } + transceiver.setDirection(typStringToTransceiverDirection(direction)); + result.success(null); + } + + public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { + RtpTransceiver transceiver = transceivers.get(transceiverId); + if (transceiver == null) { + result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() transceiver is null", null); + } + ConstraintsMap params = new ConstraintsMap(); + params.putString("result", transceiverDirectionString(transceiver.getDirection())); + result.success(params.toMap()); + } } diff --git a/lib/src/media_stream.dart b/lib/src/media_stream.dart index 37625e0f3d..2a6844a7af 100644 --- a/lib/src/media_stream.dart +++ b/lib/src/media_stream.dart @@ -4,6 +4,10 @@ import 'utils.dart'; class MediaStream { MediaStream(this._streamId, this._ownerTag); + factory MediaStream.fromMap(Map map) { + return MediaStream(map['streamId'], map['ownerTag']) + ..setMediaTracks(map['audioTracks'], map['videoTracks']); + } final _channel = WebRTC.methodChannel(); final String _streamId; final String _ownerTag; @@ -15,14 +19,12 @@ class MediaStream { void setMediaTracks(List audioTracks, List videoTracks) { _audioTracks.clear(); audioTracks.forEach((track) { - _audioTracks.add(MediaStreamTrack( - track['id'], track['label'], track['kind'], track['enabled'])); + _audioTracks.add(MediaStreamTrack.fromMap(track)); }); _videoTracks.clear(); videoTracks.forEach((track) { - _videoTracks.add(MediaStreamTrack( - track['id'], track['label'], track['kind'], track['enabled'])); + _videoTracks.add(MediaStreamTrack.fromMap(track)); }); } diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 7520c22af4..2584e9e52e 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -1,4 +1,5 @@ import 'dart:async'; +import 'dart:html'; import 'package:flutter/services.dart'; @@ -191,6 +192,15 @@ class RTCPeerConnection { case 'onTrack': onTrack?.call(RTCRtpTransceiver.fromMap(map['transceiver'])); break; + case 'onAddTrack2': + var streamsParams = map['mediaStreams'] as List>; + var mediaStreams = []; + streamsParams.forEach((element) { + mediaStreams.add(MediaStream.fromMap(element)); + }); + onAddTrack2?.call( + RTCRtpReceiver.fromMap(map['receiver']), mediaStreams); + break; } } From fe320c17407139fd26316ff364ede812bdb14bfc Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 6 Oct 2020 22:25:45 +0800 Subject: [PATCH 04/26] Update. 
--- .../com/cloudwebrtc/webrtc/PeerConnectionObserver.java | 9 +++------ lib/src/rtc_peerconnection.dart | 6 ++---- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index e96f661447..392a40d18f 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -582,24 +582,21 @@ private Map rtpParametersToMap(RtpParameters rtpParameters){ @Nullable private Map mediaStreamToMap(MediaStream stream){ ConstraintsMap params = new ConstraintsMap(); - params.putString("streamId", stream.getId()); params.putString("ownerTag", id); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); - for (int i = 0; i < stream.audioTracks.size(); i++) { - MediaStreamTrack track = stream.videoTracks.get(i); + for (MediaStreamTrack track : stream.audioTracks) { audioTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); } - for (int i = 0; i < stream.videoTracks.size(); i++) { - MediaStreamTrack track = stream.videoTracks.get(i); + + for (MediaStreamTrack track : stream.videoTracks) { videoTracks.pushMap(new ConstraintsMap(mediaTrackToMap(track))); } params.putArray("audioTracks", audioTracks.toArrayList()); params.putArray("videoTracks", videoTracks.toArrayList()); - return params.toMap(); } diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 2584e9e52e..e20c68e764 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -194,10 +194,8 @@ class RTCPeerConnection { break; case 'onAddTrack2': var streamsParams = map['mediaStreams'] as List>; - var mediaStreams = []; - streamsParams.forEach((element) { - mediaStreams.add(MediaStream.fromMap(element)); - }); + var mediaStreams = + streamsParams.map((e) => MediaStream.fromMap(e)).toList(); onAddTrack2?.call( RTCRtpReceiver.fromMap(map['receiver']), mediaStreams); break; From 2b404f828f2257339048424d90895baf224e4cfe Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Thu, 8 Oct 2020 21:53:24 +0800 Subject: [PATCH 05/26] Fix bug. 
--- .../java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java | 4 +++- lib/src/rtc_peerconnection.dart | 2 -- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 392a40d18f..ce852bc2cd 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -569,7 +569,9 @@ private Map rtpParametersToMap(RtpParameters rtpParameters){ map.putString("name",codec.name); map.putInt("payloadType", codec.payloadType); map.putInt("clockRate", codec.clockRate); - map.putInt("numChannels", codec.numChannels); + if (codec.numChannels != null) { + map.putInt("numChannels", codec.numChannels); + } map.putMap("numTemporalLayers", new HashMap(codec.parameters)); //map.putString("kind", codec.kind); codecs.pushMap(map); diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index e20c68e764..08bb61ee0b 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -1,6 +1,4 @@ import 'dart:async'; -import 'dart:html'; - import 'package:flutter/services.dart'; import 'enums.dart'; From 2812cba3b606937fc816e95682e970917b998514 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Thu, 8 Oct 2020 23:21:28 +0800 Subject: [PATCH 06/26] Update. --- .../cloudwebrtc/webrtc/GetUserMediaImpl.java | 47 ++-- .../webrtc/MethodCallHandlerImpl.java | 212 +++++++++--------- .../webrtc/PeerConnectionObserver.java | 78 +++++-- lib/src/rtc_rtp_receiver.dart | 16 -- lib/src/rtc_rtp_sender.dart | 1 + 5 files changed, 195 insertions(+), 159 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index 8d94262ec5..121c92dd0b 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -211,6 +211,12 @@ public void onResume() { this.applicationContext = applicationContext; } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + /** * Includes default constraints set for the audio media type. * @@ -396,7 +402,7 @@ void getUserMedia( // requestedMediaTypes is the empty set, the method invocation fails // with a TypeError. if (requestPermissions.isEmpty()) { - result.error("TypeError", "constraints requests no media types", null); + resultError("getUserMedia", "TypeError, constraints requests no media types", result); return; } @@ -423,7 +429,7 @@ public void invoke(Object... args) { // getUserMedia() algorithm, if the user has denied // permission, fail "with a new DOMException object whose // name attribute has the value NotAllowedError." 
- result.error("DOMException", "NotAllowedError", null); + resultError("getUserMedia", "DOMException, NotAllowedError", result); } }); } @@ -453,7 +459,7 @@ protected void onReceiveResult(int requestCode, Bundle resultData) { Intent mediaProjectionData = resultData.getParcelable(PROJECTION_DATA); if (resultCode != Activity.RESULT_OK) { - result.error(null, "User didn't give permission to capture the screen.", null); + resultError("screenRequestPremissions", "User didn't give permission to capture the screen.", result); return; } @@ -465,13 +471,11 @@ protected void onReceiveResult(int requestCode, Bundle resultData) { new MediaProjection.Callback() { @Override public void onStop() { - Log.e(TAG, "User revoked permission to capture the screen."); - result.error(null, "User revoked permission to capture the screen.", null); + resultError("MediaProjection.Callback()", "User revoked permission to capture the screen.", result); } }); if (videoCapturer == null) { - result.error( - /* type */ "GetDisplayMediaFailed", "Failed to create new VideoCapturer!", null); + resultError("screenRequestPremissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result); return; } @@ -573,7 +577,7 @@ private void getUserMedia( // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error(/* type */ "GetUserMediaFailed", "Failed to create new track", null); + resultError("getUserMedia", "Failed to create new track.", result); return; } @@ -753,7 +757,7 @@ private void requestPermissions( void switchCamera(String id, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(id); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + id, null); + resultError("switchCamera", "Video capturer not found for id: " + id, result); return; } @@ -767,7 +771,7 @@ public void onCameraSwitchDone(boolean b) { @Override public void onCameraSwitchError(String s) { - result.error("Switching camera failed", s, null); + resultError("switchCamera", "Switching camera failed: " + id, result); } }); } @@ -818,7 +822,7 @@ void stopRecording(Integer id) { void hasTorch(String trackId, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(trackId); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); + resultError("hasTorch", "Video capturer not found for id: " + trackId, result); return; } @@ -837,8 +841,7 @@ void hasTorch(String trackId, Result result) { (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -866,8 +869,7 @@ void hasTorch(String trackId, Result result) { camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("hasTorch", "[TORCH] Failed 
to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -879,15 +881,14 @@ void hasTorch(String trackId, Result result) { return; } - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); } @RequiresApi(api = VERSION_CODES.LOLLIPOP) void setTorch(String trackId, boolean torch, Result result) { VideoCapturer videoCapturer = mVideoCapturers.get(trackId); if (videoCapturer == null) { - result.error(null, "Video capturer not found for id: " + trackId, null); + resultError("setTorch", "Video capturer not found for id: " + trackId, result); return; } @@ -919,8 +920,7 @@ void setTorch(String trackId, boolean torch, Result result) { (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera2Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -959,8 +959,7 @@ void setTorch(String trackId, boolean torch, Result result) { camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); } catch (NoSuchFieldWithNameException e) { // Most likely the upstream Camera1Capturer class have changed - Log.e(TAG, "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`"); - result.error(null, "Failed to get `" + e.fieldName + "` from `" + e.className + "`", null); + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); return; } @@ -972,9 +971,7 @@ void setTorch(String trackId, boolean torch, Result result) { result.success(null); return; } - - Log.e(TAG, "[TORCH] Video capturer not compatible"); - result.error(null, "Video capturer not compatible", null); + resultError("setTorch", "[TORCH] Video capturer not compatible", result); } private Object getPrivateProperty(Class klass, Object object, String fieldName) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index d05b103863..cbd0383f23 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -121,6 +121,12 @@ interface AudioManager { this.audioManager = audioManager; } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + void dispose() { mPeerConnectionObservers.clear(); } @@ -275,10 +281,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } result.success("success"); } else { - Log.d(TAG, "dtmf() peerConnection is null"); - result - .error("dtmf", "sendDtmf() peerConnection is null", - null); + resultError("dtmf", "peerConnection is null", result); } break; } @@ -314,8 +317,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { String data = call.argument("data"); byteBuffer = ByteBuffer.wrap(data.getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { - Log.d(TAG, "Could not encode text string as UTF-8."); - 
result.error("dataChannelSendFailed", "Could not encode text string as UTF-8.", null); + resultError("dataChannelSend", "Could not encode text string as UTF-8.", result); return; } } @@ -400,8 +402,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { int textureId = call.argument("textureId"); FlutterRTCVideoRenderer render = renders.get(textureId); if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererDispose", "render [" + textureId + "] not found !", result); return; } render.Dispose(); @@ -416,8 +417,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { FlutterRTCVideoRenderer render = renders.get(textureId); if (render == null) { - result.error("FlutterRTCVideoRendererNotFound", "render [" + textureId + "] not found !", - null); + resultError("videoRendererSetSrcObject", "render [" + textureId + "] not found !", result); return; } @@ -487,10 +487,10 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { getUserMediaImpl.startRecordingToFile(path, recorderId, videoTrack, audioChannel); result.success(null); } else { - result.error("0", "No tracks", null); + resultError("startRecordToFile", "No tracks", result); } } catch (Exception e) { - result.error("-1", e.getMessage(), e); + resultError("startRecordToFile", e.getMessage(), result); } break; case "stopRecordToFile": @@ -506,10 +506,10 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { if (track instanceof VideoTrack) { new FrameCapturer((VideoTrack) track, new File(path), result); } else { - result.error(null, "It's not video track", null); + resultError("captureFrame", "It's not video track", result); } } else { - result.error(null, "Track is null", null); + resultError("captureFrame", "Track is null", result); } break; case "getLocalDescription": { @@ -522,9 +522,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { params.putString("type", sdp.type.canonicalForm()); result.success(params.toMap()); } else { - Log.d(TAG, "getLocalDescription() peerConnection is null"); - result.error("getLocalDescriptionFailed", "getLocalDescription() peerConnection is null", - null); + resultError("getLocalDescription", "peerConnection is nulll", result); } break; } @@ -538,10 +536,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { params.putString("type", sdp.type.canonicalForm()); result.success(params.toMap()); } else { - Log.d(TAG, "getRemoteDescription() peerConnection is null"); - result - .error("getRemoteDescriptionFailed", "getRemoteDescription() peerConnection is null", - null); + resultError("getRemoteDescription", "peerConnection is nulll", result); } break; } @@ -553,8 +548,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { peerConnectionSetConfiguration(new ConstraintsMap(configuration), peerConnection); result.success(null); } else { - Log.d(TAG, "setConfiguration() peerConnection is null"); - result.error("setConfigurationFailed", "setConfiguration() peerConnection is null", null); + resultError("setConfiguration", "peerConnection is nulll", result); } break; } @@ -614,28 +608,34 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "rtpTransceiverStop": { String peerConnectionId = call.argument("peerConnectionId"); String transceiverId = call.argument("transceiverId"); - - result.notImplemented(); + 
rtpTransceiverStop(peerConnectionId, transceiverId, result); break; } case "rtpSenderSetParameters": { - result.notImplemented(); + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + Map parameters = call.argument("parameters"); + rtpSenderSetParameters(peerConnectionId, rtpSenderId, parameters, result); break; } case "rtpSenderReplaceTrack": { - result.notImplemented(); + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, true, result); break; } case "rtpSenderSetTrack": { - result.notImplemented(); + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + String trackId = call.argument("trackId"); + rtpSenderSetTrack(peerConnectionId, rtpSenderId, trackId, false, result); break; } case "rtpSenderDispose": { - result.notImplemented(); - break; - } - case "rtpReceiverSetParameters": { - result.notImplemented(); + String peerConnectionId = call.argument("peerConnectionId"); + String rtpSenderId = call.argument("rtpSenderId"); + rtpSenderDispose(peerConnectionId, rtpSenderId, result); break; } default: @@ -1018,9 +1018,7 @@ public void getUserMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. - result.error( - /* type */ "getUserMediaFailed", - "Failed to create new media stream", null); + resultError("getUserMediaFailed", "Failed to create new media stream", result); return; } @@ -1036,9 +1034,7 @@ public void getDisplayMedia(ConstraintsMap constraints, Result result) { // specified by // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia // with respect to distinguishing the various causes of failure. 
- result.error( - /* type */ "getDisplayMedia", - "Failed to create new media stream", null); + resultError("getDisplayMedia", "Failed to create new media stream", result); return; } @@ -1075,7 +1071,7 @@ private void createLocalMediaStream(Result result) { localStreams.put(streamId, mediaStream); if (mediaStream == null) { - result.error(/* type */ "createLocalMediaStream", "Failed to create new media stream", null); + resultError("createLocalMediaStream", "Failed to create new media stream", result); return; } Map resultMap = new HashMap<>(); @@ -1136,14 +1132,10 @@ public void mediaStreamAddTrack(final String streaemId, final String trackId, Re mediaStream.addTrack((VideoTrack) track); } } else { - String errorMsg = "mediaStreamAddTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamAddTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamAddTrack", errorMsg, null); + resultError("mediaStreamAddTrack", "mediaStreamAddTrack() stream [" + trackId + "] is null", result); } result.success(null); } @@ -1159,14 +1151,10 @@ public void mediaStreamRemoveTrack(final String streaemId, final String trackId, mediaStream.removeTrack((VideoTrack) track); } } else { - String errorMsg = "mediaStreamRemoveTrack() track [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() track [" + trackId + "] is null", result); } } else { - String errorMsg = "mediaStreamRemoveTrack() stream [" + trackId + "] is null"; - Log.d(TAG, errorMsg); - result.error("mediaStreamRemoveTrack", errorMsg, null); + resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() stream [" + trackId + "] is null", result); } result.success(null); } @@ -1241,9 +1229,7 @@ public void peerConnectionAddStream(final String streamId, final String id, Resu Log.d(TAG, "addStream" + result); result.success(res); } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); - result.error("peerConnectionAddStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionAddStream", "peerConnection is null", result); } } @@ -1258,9 +1244,7 @@ public void peerConnectionRemoveStream(final String streamId, final String id, R peerConnection.removeStream(mediaStream); result.success(null); } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); - result - .error("peerConnectionRemoveStreamFailed", - "peerConnectionAddStream() peerConnection is null", null); + resultError("peerConnectionRemoveStream", "peerConnection is null", result); } } @@ -1274,8 +1258,8 @@ public void peerConnectionCreateOffer( peerConnection.createOffer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_OFFER_ERROR", s, null); - } + resultError("peerConnectionCreateOffer", "WEBRTC_CREATE_OFFER_ERROR: " + s, result); + } @Override public void onCreateSuccess(final SessionDescription sdp) { @@ -1294,8 +1278,7 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); - result.error("WEBRTC_CREATE_OFFER_ERROR", "peerConnection is null", null); + resultError("peerConnectionCreateOffer",
"WEBRTC_CREATE_OFFER_ERROR", result); } } @@ -1309,7 +1292,7 @@ public void peerConnectionCreateAnswer( peerConnection.createAnswer(new SdpObserver() { @Override public void onCreateFailure(String s) { - result.error("WEBRTC_CREATE_ANSWER_ERROR", s, null); + resultError("peerConnectionCreateAnswer", "WEBRTC_CREATE_ANSWER_ERROR: " + s, result); } @Override @@ -1329,8 +1312,7 @@ public void onSetSuccess() { } }, parseMediaConstraints(constraints)); } else { - Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); - result.error("WEBRTC_CREATE_ANSWER_ERROR", "peerConnection is null", null); + resultError("peerConnectionCreateAnswer", "peerConnection is null", result); } } @@ -1361,12 +1343,11 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", s, null); + resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetLocalDescription() peerConnection is null"); - result.error("WEBRTC_SET_LOCAL_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection is null", result); } Log.d(TAG, "peerConnectionSetLocalDescription() end"); } @@ -1399,12 +1380,11 @@ public void onCreateFailure(String s) { @Override public void onSetFailure(String s) { - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", s, null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: " + s, result); } }, sdp); } else { - Log.d(TAG, "peerConnectionSetRemoteDescription() peerConnection is null"); - result.error("WEBRTC_SET_REMOTE_DESCRIPTION_ERROR", "peerConnection is null", null); + resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection is null", result); } Log.d(TAG, "peerConnectionSetRemoteDescription() end"); } @@ -1422,9 +1402,7 @@ public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final Str ); res = peerConnection.addIceCandidate(candidate); } else { - Log.d(TAG, "peerConnectionAddICECandidate() peerConnection is null"); - result.error("peerConnectionAddICECandidateFailed", - "peerConnectionAddICECandidate() peerConnection is null", null); + resultError("peerConnectionAddICECandidate", "peerConnection is null", result); } result.success(res); Log.d(TAG, "peerConnectionAddICECandidate() end"); @@ -1433,7 +1411,7 @@ public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final Str public void peerConnectionGetStats(String trackId, String id, final Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); + resultError("peerConnectionGetStats", "peerConnection is null", result); } else { pco.getStats(trackId, result); } @@ -1522,46 +1500,39 @@ public void setActivity(Activity activity) { public void createSender(String peerConnectionId, String kind, String streamId, Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "createSender() peerConnection is null"); - result.error("createSender", "createSender() peerConnection is null", null); + resultError("createSender", "peerConnection is null", result); } else { - pco.createSender(kind, streamId, result); + pco.createSender(kind, 
streamId, result); } } - public void stopSender(String peerConnectionId, String senderId, Result result) { - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "removeTrack() peerConnection is null"); - result.error("removeTrack", "removeTrack() peerConnection is null", null); + resultError("stopSender", "peerConnection is null", result); } else { - pco.closeSender(senderId, result); + pco.closeSender(senderId, result); } } public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result){ - PeerConnectionObserver pco - = mPeerConnectionObservers.get(peerConnectionId); + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); MediaStreamTrack track = localTracks.get(trackId); if (track == null) { - result.error("addTrack", "addTrack() track is null", null); - return; + resultError("addTrack", "track is null", result); + return; } if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "addTrack() peerConnection is null"); - result.error("addTrack", "addTrack() peerConnection is null", null); + resultError("addTrack", "peerConnection is null", result); } else { - pco.addTrack(track, streamIds, result); + pco.addTrack(track, streamIds, result); } } public void removeTrack(String peerConnectionId, String senderId, Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { - Log.d(TAG, "removeTrack() peerConnection is null"); - result.error("removeTrack", "removeTrack() peerConnection is null", null); + resultError("removeTrack", "peerConnection is null", result); } else { pco.removeTrack(senderId, result); } @@ -1572,12 +1543,11 @@ public void addTransceiver(String peerConnectionId, String trackId, Map parameters, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetParameters", "peerConnection is null", result); + } else { + pco.rtpSenderSetParameters(rtpSenderId, parameters, result); + } + } + + public void rtpSenderDispose(String peerConnectionId, String rtpSenderId, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderDispose", "peerConnection is null", result); + } else { + pco.rtpSenderDispose(rtpSenderId, result); + } + } + + public void rtpSenderSetTrack(String peerConnectionId, String rtpSenderId, String trackId, boolean replace, Result result) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + if (pco == null || pco.getPeerConnection() == null) { + resultError("rtpSenderSetTrack", "peerConnection is null", result); + } else { + MediaStreamTrack track = localTracks.get(trackId); + if (track == null) { + resultError("rtpSenderSetTrack", "track is null", result); + return; + } + pco.rtpSenderSetTrack(rtpSenderId, track, result, replace); + } + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index ce852bc2cd..0b8a910a51 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ 
b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -56,6 +56,12 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St eventChannel.setStreamHandler(this); } + static private void resultError(String method, String error, Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg,null); + Log.d(TAG, errorMsg); + } + @Override public void onListen(Object o, EventChannel.EventSink sink) { eventSink = new AnyThreadSink(sink); @@ -117,17 +123,15 @@ void createDataChannel(String label, ConstraintsMap config, Result result) { // breakages). int dataChannelId = init.id; if (dataChannel != null && -1 != dataChannelId) { - dataChannels.put(dataChannelId, dataChannel); - registerDataChannelObserver(dataChannelId, dataChannel); + dataChannels.put(dataChannelId, dataChannel); + registerDataChannelObserver(dataChannelId, dataChannel); - ConstraintsMap params = new ConstraintsMap(); - params.putInt("id", dataChannel.id()); - params.putString("label", dataChannel.label()); - result.success(params.toMap()); + ConstraintsMap params = new ConstraintsMap(); + params.putInt("id", dataChannel.id()); + params.putString("label", dataChannel.label()); + result.success(params.toMap()); } else { - result.error("createDataChannel", - "Can't create data-channel for id: " + dataChannelId, - null); + resultError("createDataChannel", "Can't create data-channel for id: " + dataChannelId, result); } } @@ -192,10 +196,7 @@ public void onComplete(StatsReport[] reports) { }, track); } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - result.error("peerConnectionGetStats", - "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId, - null); + resultError("peerConnectionGetStats","MediaStreamTrack not found for id: " + trackId, result); } } @@ -520,6 +521,11 @@ private RtpTransceiver.RtpTransceiverDirection typStringToTransceiverDirection(S return RtpTransceiver.RtpTransceiverDirection.INACTIVE; } + private RtpParameters MapToRtpParameters(Map parameters) { + RtpParameters rtpParameters = null; + return rtpParameters; + } + private Map rtpParametersToMap(RtpParameters rtpParameters){ ConstraintsMap info = new ConstraintsMap(); info.putString("transactionId", rtpParameters.transactionId); @@ -684,7 +690,7 @@ public void addTrack(MediaStreamTrack track, List streamIds, Result resu public void removeTrack(String senderId, Result result){ RtpSender sender = senders.get(senderId); if(sender == null){ - result.error("removeTrack", "removeTrack() sender is null", null); + resultError("removeTrack", "sender is null", result); return; } boolean res = peerConnection.removeTrack(sender); @@ -729,7 +735,8 @@ else if(mediaType == "video") public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { RtpTransceiver transceiver = transceivers.get(transceiverId); if (transceiver == null) { - result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() transceiver is null", null); + resultError("rtpTransceiverSetDirection", "transceiver is null", result); + return; } transceiver.setDirection(typStringToTransceiverDirection(direction)); result.success(null); @@ -738,10 +745,49 @@ public void rtpTransceiverSetDirection(String direction, String transceiverId, R public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { RtpTransceiver transceiver = transceivers.get(transceiverId); if (transceiver == null) { - 
result.error("rtpTransceiverSetDirection", "rtpTransceiverSetDirection() transceiver is null", null); + resultError("rtpTransceiverGetCurrentDirection", "transceiver is null", result); + return; } ConstraintsMap params = new ConstraintsMap(); params.putString("result", transceiverDirectionString(transceiver.getDirection())); result.success(params.toMap()); } + + public void rtpTransceiverStop(String transceiverId, Result result) { + RtpTransceiver transceiver = transceivers.get(transceiverId); + if (transceiver == null) { + resultError("rtpTransceiverStop", "transceiver is null", result); + return; + } + transceiver.stop(); + result.success(null); + } + + public void rtpSenderSetParameters(String rtpSenderId, Map parameters, Result result) { + RtpSender sender = senders.get(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetParameters", "sender is null", result); + return; + } + sender.setParameters(MapToRtpParameters(parameters)); + } + + public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { + RtpSender sender = senders.get(rtpSenderId); + if (sender == null) { + resultError("rtpSenderSetTrack", "sender is null", result); + return; + } + sender.setTrack(track, replace ); + } + + public void rtpSenderDispose(String rtpSenderId, Result result) { + RtpSender sender = senders.get(rtpSenderId); + if (sender == null) { + resultError("rtpSenderDispose", "sender is null", result); + return; + } + sender.dispose(); + senders.remove(rtpSenderId); + } } diff --git a/lib/src/rtc_rtp_receiver.dart b/lib/src/rtc_rtp_receiver.dart index 0877d24598..1aa099cec4 100644 --- a/lib/src/rtc_rtp_receiver.dart +++ b/lib/src/rtc_rtp_receiver.dart @@ -50,22 +50,6 @@ class RTCRtpReceiver { _peerConnectionId = id; } - /// Currently, doesn't support changing any parameters, but may in the future. - Future setParameters(RTCRtpParameters parameters) async { - _parameters = parameters; - try { - final response = await _channel - .invokeMethod('rtpReceiverSetParameters', { - 'peerConnectionId': _peerConnectionId, - 'rtpReceiverId': _id, - 'parameters': parameters.toMap() - }); - return response['result']; - } on PlatformException catch (e) { - throw 'Unable to RTCRtpReceiver::setParameters: ${e.message}'; - } - } - /// The WebRTC specification only defines RTCRtpParameters in terms of senders, /// but this API also applies them to receivers, similar to ORTC: /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/rtc_rtp_sender.dart index 9a74465eb6..ecd20f10b1 100644 --- a/lib/src/rtc_rtp_sender.dart +++ b/lib/src/rtc_rtp_sender.dart @@ -84,6 +84,7 @@ class RTCRtpSender { Future dispose() async { try { await _channel.invokeMethod('rtpSenderDispose', { + 'peerConnectionId': _peerConnectionId, 'rtpSenderId': _id, }); } on PlatformException catch (e) { From 48403c6fb5363c696d8ba4e3863c3c811bb1549b Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 9 Oct 2020 00:22:43 +0800 Subject: [PATCH 07/26] Fix the kind parameter in RTCRTPCodec. 
--- .../webrtc/PeerConnectionObserver.java | 22 ++++++++++++++----- lib/src/rtc_rtp_receiver.dart | 9 -------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 0b8a910a51..bfc5dda6b0 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -9,6 +9,7 @@ import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel.Result; +import java.lang.reflect.Field; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.Iterator; @@ -30,9 +31,7 @@ import org.webrtc.VideoTrack; class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.StreamHandler { - private final static String TAG = FlutterWebRTCPlugin.TAG; - private final SparseArray dataChannels = new SparseArray<>(); private BinaryMessenger messenger; private final String id; @@ -43,7 +42,6 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St final Map senders = new HashMap(); final Map receivers = new HashMap(); private final StateProvider stateProvider; - private final EventChannel eventChannel; private EventChannel.EventSink eventSink; @@ -578,8 +576,22 @@ private Map rtpParametersToMap(RtpParameters rtpParameters){ if (codec.numChannels != null) { map.putInt("numChannels", codec.numChannels); } - map.putMap("numTemporalLayers", new HashMap(codec.parameters)); - //map.putString("kind", codec.kind); + map.putMap("parameters", new HashMap(codec.parameters)); + try { + Field field = codec.getClass().getDeclaredField("kind"); + field.setAccessible(true); + if (field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO)) { + map.putString("kind", "audio"); + } else if(field.get(codec).equals(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO)) { + map.putString("kind", "video"); + } + } catch (NoSuchFieldException e1) { + e1.printStackTrace(); + } catch (IllegalArgumentException e1) { + e1.printStackTrace(); + } catch (IllegalAccessException e1) { + e1.printStackTrace(); + } codecs.pushMap(map); } diff --git a/lib/src/rtc_rtp_receiver.dart b/lib/src/rtc_rtp_receiver.dart index 1aa099cec4..e4009bb6fe 100644 --- a/lib/src/rtc_rtp_receiver.dart +++ b/lib/src/rtc_rtp_receiver.dart @@ -1,11 +1,8 @@ import 'dart:async'; -import 'package:flutter/services.dart'; import 'media_stream_track.dart'; import 'rtc_rtp_parameters.dart'; -import 'utils.dart'; - enum RTCRtpMediaType { RTCRtpMediaTypeAudio, RTCRtpMediaTypeVideo, @@ -37,8 +34,6 @@ class RTCRtpReceiver { } /// private: - final MethodChannel _channel = WebRTC.methodChannel(); - String _peerConnectionId; String _id; MediaStreamTrack _track; RTCRtpParameters _parameters; @@ -46,10 +41,6 @@ class RTCRtpReceiver { /// public: OnFirstPacketReceivedCallback onFirstPacketReceived; - set peerConnectionId(String id) { - _peerConnectionId = id; - } - /// The WebRTC specification only defines RTCRtpParameters in terms of senders, /// but this API also applies them to receivers, similar to ORTC: /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. From 7cb1918546d1b4a80fe126d95f673dced50f8bbf Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 9 Oct 2020 14:25:31 +0800 Subject: [PATCH 08/26] Add PeerConnectionState and add RTCTrackEvent. 
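The Android observer now implements onConnectionChange() and forwards the aggregate PeerConnectionState to Dart, where it surfaces as RTCPeerConnectionState through a new onConnectionState callback; unified-plan track events are wrapped in a new RTCTrackEvent class. A minimal wiring sketch (not part of the diff; `pc` and `remoteRenderer` are placeholders for an existing RTCPeerConnection and RTCVideoRenderer):

    pc.onConnectionState = (RTCPeerConnectionState state) {
      if (state == RTCPeerConnectionState.RTCPeerConnectionStateFailed) {
        print('peer connection failed');
      }
    };
    pc.onTrack = (RTCTrackEvent event) {
      // Render the first stream carrying a remote video track.
      if (event.track.kind == 'video' && event.streams.isNotEmpty) {
        remoteRenderer.srcObject = event.streams[0];
      }
    };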
--- .../webrtc/MethodCallHandlerImpl.java | 18 +++---- .../webrtc/PeerConnectionObserver.java | 30 +++++++++++- lib/src/enums.dart | 28 +++++++++++ lib/src/rtc_peerconnection.dart | 47 +++++++++---------- lib/src/rtc_rtp_parameters.dart | 25 +++++----- lib/src/rtc_rtp_transceiver.dart | 9 +++- lib/src/rtc_track_event.dart | 21 +++++++++ 7 files changed, 131 insertions(+), 47 deletions(-) create mode 100644 lib/src/rtc_track_event.dart diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index cbd0383f23..ccda11f20c 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -580,16 +580,16 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } case "addTransceiver": { String peerConnectionId = call.argument("peerConnectionId"); - String trackId = call.argument("trackId"); Map transceiverInit = call.argument("transceiverInit"); - addTransceiver(peerConnectionId, trackId, transceiverInit, result); - break; - } - case "addTransceiverOfType": { - String peerConnectionId = call.argument("peerConnectionId"); - String mediaType = call.argument("mediaType"); - Map transceiverInit = call.argument("transceiverInit"); - addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + if(call.hasArgument("trackId")) { + String trackId = call.argument("trackId"); + addTransceiver(peerConnectionId, trackId, transceiverInit, result); + } else if(call.hasArgument("")) { + String mediaType = call.argument("mediaType"); + addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); + } else { + resultError("addTransceiver", "Incomplete parameters", result); + } break; } case "rtpTransceiverSetDirection": { diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index bfc5dda6b0..d08784d416 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -437,7 +437,16 @@ public void onSignalingChange(PeerConnection.SignalingState signalingState) { sendEvent(params); } - @Nullable + @Override + public void onConnectionChange(PeerConnection.PeerConnectionState connectionState) { + Log.d(TAG, "onConnectionChange" + connectionState.name()); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "peerConnectionState"); + params.putString("state", connectionStateString(connectionState)); + sendEvent(params); + } + + @Nullable private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { switch (iceConnectionState) { case NEW: @@ -490,6 +499,25 @@ private String signalingStateString(PeerConnection.SignalingState signalingState return null; } + @Nullable + private String connectionStateString(PeerConnection.PeerConnectionState connectionState) { + switch (connectionState) { + case NEW: + return "new"; + case CONNECTING: + return "connecting"; + case CONNECTED: + return "connected"; + case DISCONNECTED: + return "disconnected"; + case FAILED: + return "failed"; + case CLOSED: + return "closed"; + } + return null; + } + @Nullable private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection direction) { switch (direction) { diff --git a/lib/src/enums.dart b/lib/src/enums.dart 
index 7bc1721d20..a6a13277a6 100644 --- a/lib/src/enums.dart +++ b/lib/src/enums.dart @@ -19,6 +19,15 @@ enum RTCSignalingState { RTCSignalingStateClosed } +enum RTCPeerConnectionState { + RTCPeerConnectionStateClosed, + RTCPeerConnectionStateFailed, + RTCPeerConnectionStateDisconnected, + RTCPeerConnectionStateNew, + RTCPeerConnectionStateConnecting, + RTCPeerConnectionStateConnected +} + enum RTCIceGatheringState { RTCIceGatheringStateNew, RTCIceGatheringStateGathering, @@ -106,3 +115,22 @@ RTCDataChannelState rtcDataChannelStateForString(String state) { } return RTCDataChannelState.RTCDataChannelClosed; } + +RTCPeerConnectionState peerConnectionStateForString(String state) { + switch (state) { + case 'new': + return RTCPeerConnectionState.RTCPeerConnectionStateNew; + case 'connecting': + return RTCPeerConnectionState.RTCPeerConnectionStateConnecting; + case 'connected': + return RTCPeerConnectionState.RTCPeerConnectionStateConnected; + case 'closed': + return RTCPeerConnectionState.RTCPeerConnectionStateClosed; + case 'disconnected': + return RTCPeerConnectionState.RTCPeerConnectionStateDisconnected; + case 'failed': + return RTCPeerConnectionState.RTCPeerConnectionStateFailed; + } + + return RTCPeerConnectionState.RTCPeerConnectionStateClosed; +} diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 08bb61ee0b..134cc3aeeb 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -1,4 +1,5 @@ import 'dart:async'; + import 'package:flutter/services.dart'; import 'enums.dart'; @@ -12,12 +13,15 @@ import 'rtc_rtp_sender.dart'; import 'rtc_rtp_transceiver.dart'; import 'rtc_session_description.dart'; import 'rtc_stats_report.dart'; +import 'rtc_track_event.dart'; import 'utils.dart'; /* * Delegate for PeerConnection. */ typedef SignalingStateCallback = void Function(RTCSignalingState state); +typedef PeerConnectionStateCallback = void Function( + RTCPeerConnectionState state); typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); @@ -33,7 +37,7 @@ typedef RenegotiationNeededCallback = void Function(); /// Unified-Plan typedef UnifiedPlanAddTrackCallback = void Function(RTCRtpReceiver receiver, [List mediaStreams]); -typedef UnifiedPlanTrackCallback = void Function(RTCRtpTransceiver transceiver); +typedef UnifiedPlanTrackCallback = void Function(RTCTrackEvent event); /* * PeerConnection @@ -57,10 +61,12 @@ class RTCPeerConnection { RTCDataChannel _dataChannel; Map _configuration; RTCSignalingState _signalingState; + RTCPeerConnectionState _connectionState; RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; // public: delegate SignalingStateCallback onSignalingState; + PeerConnectionStateCallback onConnectionState; IceGatheringStateCallback onIceGatheringState; IceConnectionStateCallback onIceConnectionState; IceCandidateCallback onIceCandidate; @@ -91,6 +97,12 @@ class RTCPeerConnection { RTCIceConnectionState get iceConnectionState => _iceConnectionState; + RTCPeerConnectionState get connectionState => _connectionState; + + Future get localDescription => getLocalDescription(); + + Future get remoteDescription => getRemoteDescription(); + /* * PeerConnection event listener. 
*/ @@ -102,6 +114,10 @@ class RTCPeerConnection { _signalingState = signalingStateForString(map['state']); onSignalingState?.call(_signalingState); break; + case 'peerConnectionState': + _connectionState = peerConnectionStateForString(map['state']); + onConnectionState?.call(_connectionState); + break; case 'iceGatheringState': _iceGatheringState = iceGatheringStateforString(map['state']); onIceGatheringState?.call(_iceGatheringState); @@ -188,7 +204,7 @@ class RTCPeerConnection { /// Unified-Plan case 'onTrack': - onTrack?.call(RTCRtpTransceiver.fromMap(map['transceiver'])); + onTrack?.call(RTCTrackEvent.fromMap(map)); break; case 'onAddTrack2': var streamsParams = map['mediaStreams'] as List>; @@ -463,32 +479,15 @@ class RTCPeerConnection { } } - Future addTransceiver(MediaStreamTrack track, - [RTCRtpTransceiverInit init]) async { + Future addTransceiver( + {MediaStreamTrack track, String kind, RTCRtpTransceiverInit init}) async { try { final response = await _channel.invokeMethod('addTransceiver', { 'peerConnectionId': _peerConnectionId, - 'trackId': track.id, - 'transceiverInit': init?.toMap() - }); - var transceiver = RTCRtpTransceiver.fromMap(response); - transceiver.peerConnectionId = _peerConnectionId; - _transceivers.add(transceiver); - return transceiver; - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; - } - } - - Future addTransceiverOfType(RTCRtpMediaType mediaType, - [RTCRtpTransceiverInit init]) async { - try { - final response = - await _channel.invokeMethod('addTransceiverOfType', { - 'peerConnectionId': _peerConnectionId, - 'mediaType': typeRTCRtpMediaTypetoString[mediaType], - 'transceiverInit': init?.toMap() + if (track != null) 'trackId': track.id, + if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], + if (init != null) 'transceiverInit': init.toMap() }); var transceiver = RTCRtpTransceiver.fromMap(response); transceiver.peerConnectionId = _peerConnectionId; diff --git a/lib/src/rtc_rtp_parameters.dart b/lib/src/rtc_rtp_parameters.dart index ffc2f3d5aa..f6c8dc135e 100644 --- a/lib/src/rtc_rtp_parameters.dart +++ b/lib/src/rtc_rtp_parameters.dart @@ -40,25 +40,25 @@ class RTCRTPCodec { class RTCRtpEncoding { RTCRtpEncoding( - this.rid, + {this.rid, this.active, this.maxBitrateBps, this.maxFramerate, this.minBitrateBps, this.numTemporalLayers, this.scaleResolutionDownBy, - this.ssrc); + this.ssrc}); factory RTCRtpEncoding.fromMap(Map map) { return RTCRtpEncoding( - map['rid'], - map['active'], - map['maxBitrateBps'], - map['maxFramerate'], - map['minBitrateBps'], - map['numTemporalLayers'], - map['scaleResolutionDownBy'], - map['ssrc']); + rid: map['rid'], + active: map['active'], + maxBitrateBps: map['maxBitrateBps'], + maxFramerate: map['maxFramerate'], + minBitrateBps: map['minBitrateBps'], + numTemporalLayers: map['numTemporalLayers'], + scaleResolutionDownBy: map['scaleResolutionDownBy'], + ssrc: map['ssrc']); } /// If non-null, this represents the RID that identifies this encoding layer. @@ -106,9 +106,10 @@ class RTCRtpEncoding { } class RTCHeaderExtension { - RTCHeaderExtension(this.uri, this.id, this.encrypted); + RTCHeaderExtension({this.uri, this.id, this.encrypted}); factory RTCHeaderExtension.fromMap(Map map) { - return RTCHeaderExtension(map['uri'], map['id'], map['encrypted']); + return RTCHeaderExtension( + uri: map['uri'], id: map['id'], encrypted: map['encrypted']); } /// The URI of the RTP header extension, as defined in RFC5285. 
diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/rtc_rtp_transceiver.dart index ea84df424c..7999d96841 100644 --- a/lib/src/rtc_rtp_transceiver.dart +++ b/lib/src/rtc_rtp_transceiver.dart @@ -1,6 +1,7 @@ import 'dart:async'; import 'package:flutter/services.dart'; +import 'package:flutter_webrtc/src/rtc_rtp_parameters.dart'; import 'rtc_rtp_receiver.dart'; import 'rtc_rtp_sender.dart'; @@ -29,16 +30,22 @@ final typeRtpTransceiverDirectionToString = RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionInactive: 'inactive', }; +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + class RTCRtpTransceiverInit { - RTCRtpTransceiverInit(this.direction, this.streamIds); + RTCRtpTransceiverInit(this.direction, this.sendEncodings, this.streamIds); factory RTCRtpTransceiverInit.fromMap(Map map) { return RTCRtpTransceiverInit( typeStringToRtpTransceiverDirection[map['direction']], + listToRtpEncodings(map['sendEncodings']), map['streamIds']); } RTCRtpTransceiverDirection direction; List streamIds; + List sendEncodings; Map toMap() { return { diff --git a/lib/src/rtc_track_event.dart b/lib/src/rtc_track_event.dart new file mode 100644 index 0000000000..e190377c53 --- /dev/null +++ b/lib/src/rtc_track_event.dart @@ -0,0 +1,21 @@ +import 'media_stream.dart'; +import 'media_stream_track.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_transceiver.dart'; + +class RTCTrackEvent { + RTCTrackEvent({this.receiver, this.streams, this.track, this.transceiver}); + factory RTCTrackEvent.fromMap(Map map) { + var streamsParams = map['streams'] as List>; + var streams = streamsParams.map((e) => MediaStream.fromMap(e)).toList(); + return RTCTrackEvent( + receiver: RTCRtpReceiver.fromMap(map['receiver']), + streams: streams, + track: MediaStreamTrack.fromMap(map['track']), + transceiver: RTCRtpTransceiver.fromMap(map['transceiver'])); + } + final RTCRtpReceiver receiver; + final List streams; + final MediaStreamTrack track; + final RTCRtpTransceiver transceiver; +} From cb1a950730edc959a726ec5d83130609c79d7bcd Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sun, 11 Oct 2020 21:32:43 +0800 Subject: [PATCH 09/26] Add example for unified-plan. 
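The loopback sample switches from the Plan-B addStream()/onAddStream flow to the unified-plan flow: the peer connection is created with 'sdpSemantics': 'unified-plan', local tracks are attached with addTransceiver(), and remote video is rendered from onTrack. Condensed from the example diff below (`pc` and `localStream` stand in for the sample's _peerConnection and _localStream fields):

    await pc.addTransceiver(
      track: localStream.getAudioTracks()[0],
      init: RTCRtpTransceiverInit(
          direction: TransceiverDirection.SendRecv,
          streamIds: [localStream.id]),
    );
    await pc.addTransceiver(
      track: localStream.getVideoTracks()[0],
      init: RTCRtpTransceiverInit(
          direction: TransceiverDirection.SendRecv,
          streamIds: [localStream.id]),
    );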
--- .../webrtc/MethodCallHandlerImpl.java | 9 ---- .../webrtc/PeerConnectionObserver.java | 28 +++++++---- example/lib/src/loopback_sample.dart | 42 ++++++++++++---- lib/flutter_webrtc.dart | 2 + lib/src/media_stream.dart | 5 ++ lib/src/rtc_peerconnection.dart | 19 ++++--- lib/src/rtc_rtp_parameters.dart | 20 ++++++-- lib/src/rtc_rtp_transceiver.dart | 50 +++++++++---------- 8 files changed, 107 insertions(+), 68 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index ccda11f20c..9490ca553a 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -1319,8 +1319,6 @@ public void onSetSuccess() { public void peerConnectionSetLocalDescription(ConstraintsMap sdpMap, final String id, final Result result) { PeerConnection peerConnection = getPeerConnection(id); - - Log.d(TAG, "peerConnectionSetLocalDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( Type.fromCanonicalForm(sdpMap.getString("type")), @@ -1349,15 +1347,11 @@ public void onSetFailure(String s) { } else { resultError("peerConnectionSetLocalDescription", "WEBRTC_SET_LOCAL_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetLocalDescription() end"); } public void peerConnectionSetRemoteDescription(final ConstraintsMap sdpMap, final String id, final Result result) { PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); - - Log.d(TAG, "peerConnectionSetRemoteDescription() start"); if (peerConnection != null) { SessionDescription sdp = new SessionDescription( Type.fromCanonicalForm(sdpMap.getString("type")), @@ -1386,14 +1380,12 @@ public void onSetFailure(String s) { } else { resultError("peerConnectionSetRemoteDescription", "WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: peerConnection is null", result); } - Log.d(TAG, "peerConnectionSetRemoteDescription() end"); } public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final String id, final Result result) { boolean res = false; PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "peerConnectionAddICECandidate() start"); if (peerConnection != null) { IceCandidate candidate = new IceCandidate( candidateMap.getString("sdpMid"), @@ -1405,7 +1397,6 @@ public void peerConnectionAddICECandidate(ConstraintsMap candidateMap, final Str resultError("peerConnectionAddICECandidate", "peerConnection is null", result); } result.success(res); - Log.d(TAG, "peerConnectionAddICECandidate() end"); } public void peerConnectionGetStats(String trackId, String id, final Result result) { diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index d08784d416..bfc9b0c2a2 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -341,6 +341,19 @@ public void onRemoveStream(MediaStream mediaStream) { sendEvent(params); } + @Override + public void onTrack(RtpTransceiver transceiver) { + /* + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onTrack"); + params.putMap("transceiver", transceiverToMap(transceiver)); + params.putMap("receiver", rtpReceiverToMap(transceiver.getReceiver())); + 
params.putMap("track", mediaTrackToMap(transceiver.getReceiver().track())); + params.putArray("streams", new ConstraintsArray().toArrayList()); + sendEvent(params); + */ + } + @Override public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { Log.d(TAG, "onAddTrack"); @@ -365,6 +378,7 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { params.putMap("track", trackInfo.toMap()); sendEvent(params); } + // For unified-plan ConstraintsMap params = new ConstraintsMap(); ConstraintsArray streams = new ConstraintsArray(); @@ -372,9 +386,11 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { MediaStream stream = mediaStreams[i]; streams.pushMap(new ConstraintsMap(mediaStreamToMap(stream))); } - params.putArray("mediaStreams", streams.toArrayList()); + + params.putString("event", "onTrack"); + params.putArray("streams", streams.toArrayList()); + params.putMap("track", mediaTrackToMap(receiver.track())); params.putMap("receiver", rtpReceiverToMap(receiver)); - params.putString("event", "onAddTrack2"); sendEvent(params); } @@ -699,14 +715,6 @@ Map transceiverToMap(RtpTransceiver transceiver){ return info.toMap(); } - @Override - public void onTrack(RtpTransceiver transceiver) { - ConstraintsMap params = new ConstraintsMap(); - params.putString("event", "onTrack"); - params.putMap("transceiver", transceiverToMap(transceiver)); - sendEvent(params); - } - public void createSender(String kind, String streamId, Result result){ RtpSender sender = peerConnection.createSender(kind, streamId); senders.put(sender.id(),sender); diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index 68331fe24e..548be8c28c 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -70,10 +70,7 @@ class _MyAppState extends State { print(state); } - void _onAddStream(MediaStream stream) { - print('addStream: ' + stream.id); - _remoteRenderer.srcObject = stream; - } + void _onAddStream(MediaStream stream) {} void _onRemoveStream(MediaStream stream) { _remoteRenderer.srcObject = null; @@ -84,6 +81,14 @@ class _MyAppState extends State { _peerConnection.addCandidate(candidate); } + void _onTrack(RTCTrackEvent event) { + print('onTrack'); + if (event.track.kind == 'video' && event.streams.isNotEmpty) { + print('New stream: ' + event.streams[0].id); + _remoteRenderer.srcObject = event.streams[0]; + } + } + void _onRenegotiationNeeded() { print('RenegotiationNeeded'); } @@ -107,7 +112,8 @@ class _MyAppState extends State { var configuration = { 'iceServers': [ {'url': 'stun:stun.l.google.com:19302'}, - ] + ], + 'sdpSemantics': 'unified-plan' }; final offerSdpConstraints = { @@ -128,8 +134,6 @@ class _MyAppState extends State { if (_peerConnection != null) return; try { - _localStream = await MediaDevices.getUserMedia(mediaConstraints); - _localRenderer.srcObject = _localStream; _peerConnection = await createPeerConnection(configuration, loopbackConstraints); @@ -141,7 +145,25 @@ class _MyAppState extends State { _peerConnection.onIceCandidate = _onCandidate; _peerConnection.onRenegotiationNeeded = _onRenegotiationNeeded; - await _peerConnection.addStream(_localStream); + _peerConnection.onTrack = _onTrack; + + _localStream = await MediaDevices.getUserMedia(mediaConstraints); + _localRenderer.srcObject = _localStream; + + await _peerConnection.addTransceiver( + track: _localStream.getAudioTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendRecv, + streamIds: 
[_localStream.id]), + ); + await _peerConnection.addTransceiver( + track: _localStream.getVideoTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendRecv, + streamIds: [_localStream.id]), + ); + + //await _peerConnection.addStream(_localStream); var description = await _peerConnection.createOffer(offerSdpConstraints); print(description.sdp); await _peerConnection.setLocalDescription(description); @@ -153,7 +175,7 @@ class _MyAppState extends State { } if (!mounted) return; - _timer = Timer.periodic(Duration(seconds: 1), handleStatsReport); + //_timer = Timer.periodic(Duration(seconds: 1), handleStatsReport); setState(() { _inCalling = true; @@ -173,7 +195,7 @@ class _MyAppState extends State { setState(() { _inCalling = false; }); - _timer.cancel(); + //_timer.cancel(); } void _sendDtmf() async { diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index a0a39116ff..f33cfa239f 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -19,9 +19,11 @@ export 'src/rtc_peerconnection.dart' if (dart.library.html) 'src/web/rtc_peerconnection.dart'; export 'src/rtc_peerconnection_factory.dart' if (dart.library.html) 'src/web/rtc_peerconnection_factory.dart'; +export 'src/rtc_rtp_transceiver.dart'; export 'src/rtc_session_description.dart' if (dart.library.html) 'src/web/rtc_session_description.dart'; export 'src/rtc_stats_report.dart'; +export 'src/rtc_track_event.dart'; export 'src/rtc_video_view.dart' if (dart.library.html) 'src/web/rtc_video_view.dart'; export 'src/utils.dart' if (dart.library.html) 'src/web/utils.dart'; diff --git a/lib/src/media_stream.dart b/lib/src/media_stream.dart index 2a6844a7af..01d6b6cd8f 100644 --- a/lib/src/media_stream.dart +++ b/lib/src/media_stream.dart @@ -1,7 +1,10 @@ import 'dart:async'; + import 'media_stream_track.dart'; import 'utils.dart'; +typedef MediaTrackTrackCallback = void Function(MediaStreamTrack track); + class MediaStream { MediaStream(this._streamId, this._ownerTag); factory MediaStream.fromMap(Map map) { @@ -15,6 +18,8 @@ class MediaStream { final _videoTracks = []; String get ownerTag => _ownerTag; String get id => _streamId; + MediaTrackTrackCallback onAddTrack; + MediaTrackTrackCallback onRemoveTrack; void setMediaTracks(List audioTracks, List videoTracks) { _audioTracks.clear(); diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 134cc3aeeb..7390bc19b2 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -203,15 +203,18 @@ class RTCPeerConnection { break; /// Unified-Plan - case 'onTrack': - onTrack?.call(RTCTrackEvent.fromMap(map)); + case 'onTrack2': + //onTrack?.call(RTCTrackEvent.fromMap(map)); break; - case 'onAddTrack2': - var streamsParams = map['mediaStreams'] as List>; - var mediaStreams = - streamsParams.map((e) => MediaStream.fromMap(e)).toList(); - onAddTrack2?.call( - RTCRtpReceiver.fromMap(map['receiver']), mediaStreams); + case 'onTrack': + var params = map['streams'] as List; + var streams = params.map((e) => MediaStream.fromMap(e)).toList(); + onTrack?.call(RTCTrackEvent( + receiver: RTCRtpReceiver.fromMap(map['receiver']), + track: MediaStreamTrack.fromMap(map['track']), + streams: streams)); + + //RTCRtpReceiver.fromMap(map['receiver']), mediaStreams); break; } } diff --git a/lib/src/rtc_rtp_parameters.dart b/lib/src/rtc_rtp_parameters.dart index f6c8dc135e..e2cbf774f0 100644 --- a/lib/src/rtc_rtp_parameters.dart +++ b/lib/src/rtc_rtp_parameters.dart @@ -1,12 +1,22 @@ import 'rtc_rtcp_parameters.dart'; 
class RTCRTPCodec { - RTCRTPCodec(this.payloadType, this.name, this.kind, this.clockRate, - this.numChannels, this.parameters); + RTCRTPCodec( + {this.payloadType, + this.name, + this.kind, + this.clockRate, + this.numChannels, + this.parameters}); factory RTCRTPCodec.fromMap(Map map) { - return RTCRTPCodec(map['payloadType'], map['name'], map['kind'], - map['clockRate'], map['numChannels'], map['parameters']); + return RTCRTPCodec( + payloadType: map['payloadType'], + name: map['name'], + kind: map['kind'], + clockRate: map['clockRate'], + numChannels: map['numChannels'] ?? 1, + parameters: map['parameters']); } // Payload type used to identify this codec in RTP packets. int payloadType; @@ -24,7 +34,7 @@ class RTCRTPCodec { int numChannels; /// The "format specific parameters" field from the "a=fmtp" line in the SDP - Map parameters; + Map parameters; Map toMap() { return { diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/rtc_rtp_transceiver.dart index 7999d96841..aff30febf9 100644 --- a/lib/src/rtc_rtp_transceiver.dart +++ b/lib/src/rtc_rtp_transceiver.dart @@ -7,27 +7,25 @@ import 'rtc_rtp_receiver.dart'; import 'rtc_rtp_sender.dart'; import 'utils.dart'; -enum RTCRtpTransceiverDirection { - RTCRtpTransceiverDirectionSendRecv, - RTCRtpTransceiverDirectionSendOnly, - RTCRtpTransceiverDirectionRecvOnly, - RTCRtpTransceiverDirectionInactive, +enum TransceiverDirection { + SendRecv, + SendOnly, + RecvOnly, + Inactive, } -final typeStringToRtpTransceiverDirection = - { - 'sendrecv': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendRecv, - 'sendonly': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendOnly, - 'recvonly': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionRecvOnly, - 'inactive': RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionInactive, +final typeStringToRtpTransceiverDirection = { + 'sendrecv': TransceiverDirection.SendRecv, + 'sendonly': TransceiverDirection.SendOnly, + 'recvonly': TransceiverDirection.RecvOnly, + 'inactive': TransceiverDirection.Inactive, }; -final typeRtpTransceiverDirectionToString = - { - RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendRecv: 'sendrecv', - RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionSendOnly: 'sendonly', - RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionRecvOnly: 'recvonly', - RTCRtpTransceiverDirection.RTCRtpTransceiverDirectionInactive: 'inactive', +final typeRtpTransceiverDirectionToString = { + TransceiverDirection.SendRecv: 'sendrecv', + TransceiverDirection.SendOnly: 'sendonly', + TransceiverDirection.RecvOnly: 'recvonly', + TransceiverDirection.Inactive: 'inactive', }; List listToRtpEncodings(List> list) { @@ -35,15 +33,15 @@ List listToRtpEncodings(List> list) { } class RTCRtpTransceiverInit { - RTCRtpTransceiverInit(this.direction, this.sendEncodings, this.streamIds); + RTCRtpTransceiverInit({this.direction, this.sendEncodings, this.streamIds}); factory RTCRtpTransceiverInit.fromMap(Map map) { return RTCRtpTransceiverInit( - typeStringToRtpTransceiverDirection[map['direction']], - listToRtpEncodings(map['sendEncodings']), - map['streamIds']); + direction: typeStringToRtpTransceiverDirection[map['direction']], + sendEncodings: listToRtpEncodings(map['sendEncodings']), + streamIds: map['streamIds']); } - RTCRtpTransceiverDirection direction; + TransceiverDirection direction; List streamIds; List sendEncodings; @@ -73,7 +71,7 @@ class RTCRtpTransceiver { String _peerConnectionId; String _id; bool _stop; - RTCRtpTransceiverDirection _direction; + TransceiverDirection _direction; 
String _mid; RTCRtpSender _sender; RTCRtpReceiver _receiver; @@ -82,7 +80,7 @@ class RTCRtpTransceiver { _peerConnectionId = id; } - RTCRtpTransceiverDirection get currentDirection => _direction; + TransceiverDirection get currentDirection => _direction; String get mid => _mid; @@ -94,7 +92,7 @@ class RTCRtpTransceiver { String get transceiverId => _id; - Future setDirection(RTCRtpTransceiverDirection direction) async { + Future setDirection(TransceiverDirection direction) async { try { await _channel .invokeMethod('rtpTransceiverSetDirection', { @@ -107,7 +105,7 @@ class RTCRtpTransceiver { } } - Future getCurrentDirection() async { + Future getCurrentDirection() async { try { final response = await _channel.invokeMethod( 'rtpTransceiverGetCurrentDirection', { From fa42f01a25852b16c3df8943f73002865778b17c Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sun, 11 Oct 2020 21:39:35 +0800 Subject: [PATCH 10/26] Update. --- lib/src/media_stream.dart | 6 +++--- lib/src/media_stream_track.dart | 4 ++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/lib/src/media_stream.dart b/lib/src/media_stream.dart index 01d6b6cd8f..6c8c8ff540 100644 --- a/lib/src/media_stream.dart +++ b/lib/src/media_stream.dart @@ -3,7 +3,7 @@ import 'dart:async'; import 'media_stream_track.dart'; import 'utils.dart'; -typedef MediaTrackTrackCallback = void Function(MediaStreamTrack track); +typedef MediaTrackCallback = void Function(MediaStreamTrack track); class MediaStream { MediaStream(this._streamId, this._ownerTag); @@ -18,8 +18,8 @@ class MediaStream { final _videoTracks = []; String get ownerTag => _ownerTag; String get id => _streamId; - MediaTrackTrackCallback onAddTrack; - MediaTrackTrackCallback onRemoveTrack; + MediaTrackCallback onAddTrack; + MediaTrackCallback onRemoveTrack; void setMediaTracks(List audioTracks, List videoTracks) { _audioTracks.clear(); diff --git a/lib/src/media_stream_track.dart b/lib/src/media_stream_track.dart index a399c1d6e7..6206e7f97a 100644 --- a/lib/src/media_stream_track.dart +++ b/lib/src/media_stream_track.dart @@ -1,6 +1,8 @@ import 'dart:async'; import 'utils.dart'; +typedef StreamTrackCallback = Function(); + class MediaStreamTrack { MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled); factory MediaStreamTrack.fromMap(Map map) { @@ -13,6 +15,8 @@ class MediaStreamTrack { final String _label; final String _kind; bool _enabled; + StreamTrackCallback onended; + StreamTrackCallback onmute; set enabled(bool enabled) { _channel.invokeMethod('mediaStreamTrackSetEnable', From a55a1acbafd12a769e1876cd7d697bda330b0aea Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sun, 11 Oct 2020 21:42:39 +0800 Subject: [PATCH 11/26] Reduce code. --- lib/src/rtc_peerconnection.dart | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 7390bc19b2..f7cac5bd7e 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -35,8 +35,6 @@ typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); typedef RenegotiationNeededCallback = void Function(); /// Unified-Plan -typedef UnifiedPlanAddTrackCallback = void Function(RTCRtpReceiver receiver, - [List mediaStreams]); typedef UnifiedPlanTrackCallback = void Function(RTCTrackEvent event); /* @@ -78,10 +76,7 @@ class RTCPeerConnection { RenegotiationNeededCallback onRenegotiationNeeded; /// Unified-Plan - // TODO(cloudwebrtc): for unified-plan. 
- UnifiedPlanAddTrackCallback onAddTrack2; UnifiedPlanTrackCallback onTrack; - UnifiedPlanTrackCallback onRemoveTrack2; final Map defaultSdpConstraints = { 'mandatory': { @@ -203,9 +198,6 @@ class RTCPeerConnection { break; /// Unified-Plan - case 'onTrack2': - //onTrack?.call(RTCTrackEvent.fromMap(map)); - break; case 'onTrack': var params = map['streams'] as List; var streams = params.map((e) => MediaStream.fromMap(e)).toList(); @@ -213,8 +205,6 @@ class RTCPeerConnection { receiver: RTCRtpReceiver.fromMap(map['receiver']), track: MediaStreamTrack.fromMap(map['track']), streams: streams)); - - //RTCRtpReceiver.fromMap(map['receiver']), mediaStreams); break; } } From c9e143c760d2b289a02343dd2be29de915c35fc0 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 12 Oct 2020 08:34:09 +0800 Subject: [PATCH 12/26] Fix simulcast. --- .../webrtc/MethodCallHandlerImpl.java | 2 +- .../webrtc/PeerConnectionObserver.java | 66 ++++++++++++++----- example/lib/src/loopback_sample.dart | 38 +++++++++-- lib/flutter_webrtc.dart | 3 + lib/src/rtc_peerconnection.dart | 4 +- lib/src/rtc_rtp_parameters.dart | 21 +++--- lib/src/rtc_rtp_transceiver.dart | 15 +++-- 7 files changed, 111 insertions(+), 38 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 9490ca553a..4499b8a674 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -584,7 +584,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { if(call.hasArgument("trackId")) { String trackId = call.argument("trackId"); addTransceiver(peerConnectionId, trackId, transceiverInit, result); - } else if(call.hasArgument("")) { + } else if(call.hasArgument("mediaType")) { String mediaType = call.argument("mediaType"); addTransceiverOfType(peerConnectionId, mediaType, transceiverInit, result); } else { diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index bfc9b0c2a2..19cbe60561 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -11,6 +11,7 @@ import io.flutter.plugin.common.MethodChannel.Result; import java.lang.reflect.Field; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -549,7 +550,7 @@ private String transceiverDirectionString(RtpTransceiver.RtpTransceiverDirection return null; } - private RtpTransceiver.RtpTransceiverDirection typStringToTransceiverDirection(String direction) { + private RtpTransceiver.RtpTransceiverDirection stringToTransceiverDirection(String direction) { switch (direction) { case "sendrecv": return RtpTransceiver.RtpTransceiverDirection.SEND_RECV; @@ -563,6 +564,46 @@ private RtpTransceiver.RtpTransceiverDirection typStringToTransceiverDirection(S return RtpTransceiver.RtpTransceiverDirection.INACTIVE; } + private MediaStreamTrack.MediaType stringToMediaType(String mediaType) { + MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + if(mediaType.equals("audio")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; + else if(mediaType.equals("video")) + type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; + return type; + } + + private 
RtpParameters.Encoding mapToEncoding(Map parameters) { + Boolean active = true; + if( parameters.get("active") != null) { + active = (Boolean) parameters.get("active"); + } + Double scaleResolutionDownBy = 1.0; + if( parameters.get("scaleResolutionDownBy") != null) { + scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); + } + + return new RtpParameters.Encoding((String)parameters.get("rid"),active, scaleResolutionDownBy); + } + + private RtpTransceiver.RtpTransceiverInit mapToRtpTransceiverInit(Map parameters) { + List streamIds = (List)parameters.get("streamIds"); + List> encodingsParams = (List>)parameters.get("sendEncodings"); + String direction = (String)parameters.get("direction"); + List sendEncodings = new ArrayList<>(); + RtpTransceiver.RtpTransceiverInit init = null; + if(encodingsParams != null) { + for (int i=0;i< encodingsParams.size();i++){ + Map params = encodingsParams.get(i); + sendEncodings.add(mapToEncoding(params)); + } + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction) ,streamIds, sendEncodings); + } else { + init = new RtpTransceiver.RtpTransceiverInit(stringToTransceiverDirection(direction) ,streamIds); + } + return init; + } + private RtpParameters MapToRtpParameters(Map parameters) { RtpParameters rtpParameters = null; return rtpParameters; @@ -606,7 +647,9 @@ private Map rtpParametersToMap(RtpParameters rtpParameters){ if (encoding.scaleResolutionDownBy != null) { map.putDouble("scaleResolutionDownBy", encoding.scaleResolutionDownBy); } - map.putLong("ssrc", encoding.ssrc); + if (encoding.ssrc != null) { + map.putLong("ssrc", encoding.ssrc); + } encodings.pushMap(map); } info.putArray("encodings", encodings.toArrayList()); @@ -750,10 +793,7 @@ public void removeTrack(String senderId, Result result){ public void addTransceiver(MediaStreamTrack track, Map transceiverInit, Result result) { RtpTransceiver transceiver; if(transceiverInit != null){ - List streamIds = (List)transceiverInit.get("streamIds"); - String direction = (String)transceiverInit.get("direction"); - RtpTransceiver.RtpTransceiverInit init = new RtpTransceiver.RtpTransceiverInit(typStringToTransceiverDirection(direction) ,streamIds); - transceiver = peerConnection.addTransceiver(track, init); + transceiver = peerConnection.addTransceiver(track, mapToRtpTransceiverInit(transceiverInit)); } else { transceiver = peerConnection.addTransceiver(track); } @@ -762,19 +802,11 @@ public void addTransceiver(MediaStreamTrack track, Map transceiv } public void addTransceiverOfType(String mediaType, Map transceiverInit, Result result) { - MediaStreamTrack.MediaType type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; - if(mediaType == "audio") - type = MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO; - else if(mediaType == "video") - type = MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO; RtpTransceiver transceiver; if(transceiverInit != null){ - List streamIds = (List)transceiverInit.get("streamIds"); - String direction = (String)transceiverInit.get("direction"); - RtpTransceiver.RtpTransceiverInit init = new RtpTransceiver.RtpTransceiverInit(typStringToTransceiverDirection(direction) ,streamIds); - transceiver = peerConnection.addTransceiver(type, init); + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType), mapToRtpTransceiverInit(transceiverInit)); } else { - transceiver = peerConnection.addTransceiver(type); + transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); } transceivers.put(transceiver.getMid(), transceiver); 
result.success(transceiverToMap(transceiver)); @@ -786,7 +818,7 @@ public void rtpTransceiverSetDirection(String direction, String transceiverId, R resultError("rtpTransceiverSetDirection", "transceiver is null", result); return; } - transceiver.setDirection(typStringToTransceiverDirection(direction)); + transceiver.setDirection(stringToTransceiverDirection(direction)); result.success(null); } diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index 548be8c28c..ce13f329de 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -17,7 +17,7 @@ class _MyAppState extends State { final _localRenderer = RTCVideoRenderer(); final _remoteRenderer = RTCVideoRenderer(); bool _inCalling = false; - Timer _timer; + //Timer _timer; @override void initState() { @@ -153,19 +153,49 @@ class _MyAppState extends State { await _peerConnection.addTransceiver( track: _localStream.getAudioTracks()[0], init: RTCRtpTransceiverInit( - direction: TransceiverDirection.SendRecv, - streamIds: [_localStream.id]), + direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); + + /* await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( direction: TransceiverDirection.SendRecv, streamIds: [_localStream.id]), ); + */ + + await _peerConnection.addTransceiver( + track: _localStream.getVideoTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendOnly, + streams: [_localStream], + sendEncodings: [ + // for firefox order matters... first high resolution, then scaled resolutions... + RTCRtpEncoding( + rid: 'f', + ), + RTCRtpEncoding( + rid: 'h', + scaleResolutionDownBy: 2.0, + ), + RTCRtpEncoding( + rid: 'q', + scaleResolutionDownBy: 4.0, + ), + ], + )); + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); + await _peerConnection.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); //await _peerConnection.addStream(_localStream); var description = await _peerConnection.createOffer(offerSdpConstraints); - print(description.sdp); + var sdp = description.sdp; + print('sdp = $sdp'); await _peerConnection.setLocalDescription(description); //change for loopback. 
description.type = 'answer'; diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index f33cfa239f..1bc2671725 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -19,6 +19,9 @@ export 'src/rtc_peerconnection.dart' if (dart.library.html) 'src/web/rtc_peerconnection.dart'; export 'src/rtc_peerconnection_factory.dart' if (dart.library.html) 'src/web/rtc_peerconnection_factory.dart'; +export 'src/rtc_rtp_parameters.dart'; +export 'src/rtc_rtp_receiver.dart'; +export 'src/rtc_rtp_sender.dart'; export 'src/rtc_rtp_transceiver.dart'; export 'src/rtc_session_description.dart' if (dart.library.html) 'src/web/rtc_session_description.dart'; diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index f7cac5bd7e..c218bfbd2d 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -473,7 +473,9 @@ class RTCPeerConnection { } Future addTransceiver( - {MediaStreamTrack track, String kind, RTCRtpTransceiverInit init}) async { + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}) async { try { final response = await _channel.invokeMethod('addTransceiver', { diff --git a/lib/src/rtc_rtp_parameters.dart b/lib/src/rtc_rtp_parameters.dart index e2cbf774f0..075ac05d5a 100644 --- a/lib/src/rtc_rtp_parameters.dart +++ b/lib/src/rtc_rtp_parameters.dart @@ -91,11 +91,11 @@ class RTCRtpEncoding { int maxFramerate; /// The number of temporal layers for video. - int numTemporalLayers; + int numTemporalLayers = 1; /// If non-null, scale the width and height down by this factor for video. If null, /// implementation default scaling factor will be used. - double scaleResolutionDownBy; + double scaleResolutionDownBy = 1.0; /// SSRC to be used by this encoding. /// Can't be changed between getParameters/setParameters. 
@@ -103,14 +103,15 @@ class RTCRtpEncoding { Map toMap() { return { - 'rid': rid, - 'active': active, - 'maxBitrateBps': maxBitrateBps, - 'maxFramerate': maxFramerate, - 'minBitrateBps': minBitrateBps, - 'numTemporalLayers': numTemporalLayers, - 'scaleResolutionDownBy': scaleResolutionDownBy, - 'ssrc': ssrc, + if (rid != null) 'rid': rid, + if (active != null) 'active': active, + if (maxBitrateBps != null) 'maxBitrateBps': maxBitrateBps, + if (maxFramerate != null) 'maxFramerate': maxFramerate, + if (minBitrateBps != null) 'minBitrateBps': minBitrateBps, + if (numTemporalLayers != null) 'numTemporalLayers': numTemporalLayers, + if (scaleResolutionDownBy != null) + 'scaleResolutionDownBy': scaleResolutionDownBy, + if (ssrc != null) 'ssrc': ssrc, }; } } diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/rtc_rtp_transceiver.dart index aff30febf9..a32b8a8b9e 100644 --- a/lib/src/rtc_rtp_transceiver.dart +++ b/lib/src/rtc_rtp_transceiver.dart @@ -1,8 +1,9 @@ import 'dart:async'; import 'package:flutter/services.dart'; -import 'package:flutter_webrtc/src/rtc_rtp_parameters.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'rtc_rtp_parameters.dart'; import 'rtc_rtp_receiver.dart'; import 'rtc_rtp_sender.dart'; import 'utils.dart'; @@ -33,22 +34,26 @@ List listToRtpEncodings(List> list) { } class RTCRtpTransceiverInit { - RTCRtpTransceiverInit({this.direction, this.sendEncodings, this.streamIds}); + RTCRtpTransceiverInit({this.direction, this.sendEncodings, this.streams}); factory RTCRtpTransceiverInit.fromMap(Map map) { return RTCRtpTransceiverInit( direction: typeStringToRtpTransceiverDirection[map['direction']], sendEncodings: listToRtpEncodings(map['sendEncodings']), - streamIds: map['streamIds']); + streams: (map['streams'] as List) + .map((e) => MediaStream.fromMap(map)) + .toList()); } TransceiverDirection direction; - List streamIds; + List streams; List sendEncodings; Map toMap() { return { 'direction': typeRtpTransceiverDirectionToString[direction], - 'streamIds': streamIds + if (streams != null) 'streamIds': streams.map((e) => e.id).toList(), + if (sendEncodings != null) + 'sendEncodings': sendEncodings.map((e) => e.toMap()).toList(), }; } } From 45c27ff3b908ec96bf269117a1edd16aa7cdfc87 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 12 Oct 2020 14:57:15 +0800 Subject: [PATCH 13/26] Fix RtpSender.replaceTrack. 
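The hunks below pass peerConnectionId into the Dart RTCRtpSender and RTCRtpTransceiver wrappers and make the native rtpSenderSetTrack / rtpSenderSetParameters handlers report a result, so RTCRtpSender.replaceTrack reaches the right native sender. A minimal sketch of the intended Dart-side usage, condensed from the commented block this patch adds to loopback_sample.dart; the transceiver returned earlier by addTransceiver and the mediaConstraints map are assumed to exist and are not part of the patch itself:

    import 'package:flutter_webrtc/flutter_webrtc.dart';

    // Sketch only: swap the outgoing video source to a screen capture by
    // replacing the track on an existing sender.
    Future<void> switchToScreenShare(RTCRtpTransceiver transceiver,
        Map<String, dynamic> mediaConstraints) async {
      final screen = await MediaDevices.getDisplayMedia(mediaConstraints);
      // replaceTrack keeps the same RTCRtpSender; renegotiate afterwards if
      // the application's signaling requires it, as the sample notes.
      await transceiver.sender.replaceTrack(screen.getVideoTracks()[0]);
    }
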
--- .../webrtc/FlutterRTCVideoRenderer.java | 3 +- .../webrtc/MethodCallHandlerImpl.java | 37 ++++++++++--------- .../webrtc/PeerConnectionObserver.java | 34 ++++++++++++++--- example/lib/src/loopback_sample.dart | 28 +++++++++----- lib/src/rtc_peerconnection.dart | 4 +- lib/src/rtc_rtp_sender.dart | 12 +++--- lib/src/rtc_rtp_transceiver.dart | 12 +++--- lib/src/rtc_track_event.dart | 6 ++- lib/src/rtc_video_view.dart | 23 +++++++----- 9 files changed, 103 insertions(+), 56 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java index ca8df79a4b..e593e557fe 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java @@ -170,9 +170,10 @@ private void setVideoTrack(VideoTrack videoTrack) { this.videoTrack = videoTrack; if (videoTrack != null) { + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to " + videoTrack.id()); tryAddRendererToVideoTrack(); } else { - Log.w(TAG, "VideoTrack is null"); + Log.w(TAG, "FlutterRTCVideoRenderer.setVideoTrack, set video track to null"); } } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index 4499b8a674..f05a50c05b 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -413,15 +413,18 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "videoRendererSetSrcObject": { int textureId = call.argument("textureId"); String streamId = call.argument("streamId"); - String peerConnectionId = call.argument("ownerTag"); + String ownerTag = call.argument("ownerTag"); FlutterRTCVideoRenderer render = renders.get(textureId); - if (render == null) { resultError("videoRendererSetSrcObject", "render [" + textureId + "] not found !", result); return; } - - MediaStream stream = getStreamForId(streamId, peerConnectionId); + MediaStream stream = null; + if (ownerTag.equals("local")) { + stream = localStreams.get(streamId); + } else { + stream = getStreamForId(streamId, ownerTag); + } render.setStream(stream); result.success(null); break; @@ -971,23 +974,23 @@ public Activity getActivity() { } MediaStream getStreamForId(String id, String peerConnectionId) { - MediaStream stream = localStreams.get(id); - - if (stream == null) { - if (peerConnectionId.length() > 0) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + MediaStream stream = null; + if (peerConnectionId.length() > 0) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); + stream = pco.remoteStreams.get(id); + } else { + for (Entry entry : mPeerConnectionObservers + .entrySet()) { + PeerConnectionObserver pco = entry.getValue(); stream = pco.remoteStreams.get(id); - } else { - for (Entry entry : mPeerConnectionObservers - .entrySet()) { - PeerConnectionObserver pco = entry.getValue(); - stream = pco.remoteStreams.get(id); - if (stream != null) { - break; - } + if (stream != null) { + break; } } } + if (stream == null) { + stream = localStreams.get(id); + } return stream; } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 19cbe60561..f60d01e947 100755 --- 
a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -574,16 +574,33 @@ else if(mediaType.equals("video")) } private RtpParameters.Encoding mapToEncoding(Map parameters) { - Boolean active = true; + RtpParameters.Encoding encoding = new RtpParameters.Encoding((String)parameters.get("rid"), true, 1.0); + if( parameters.get("active") != null) { - active = (Boolean) parameters.get("active"); + encoding.active = (Boolean) parameters.get("active"); + } + + if( parameters.get("ssrc") != null) { + encoding.ssrc = ((Integer) parameters.get("ssrc")).longValue(); + } + + if( parameters.get("minBitrateBps") != null) { + encoding.minBitrateBps = (Integer) parameters.get("minBitrateBps"); + } + + if( parameters.get("minBitrateBps") != null) { + encoding.minBitrateBps = (Integer) parameters.get("minBitrateBps"); } - Double scaleResolutionDownBy = 1.0; + + if( parameters.get("numTemporalLayers") != null) { + encoding.numTemporalLayers = (Integer) parameters.get("numTemporalLayers"); + } + if( parameters.get("scaleResolutionDownBy") != null) { - scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); + encoding.scaleResolutionDownBy = (Double) parameters.get("scaleResolutionDownBy"); } - return new RtpParameters.Encoding((String)parameters.get("rid"),active, scaleResolutionDownBy); + return encoding; } private RtpTransceiver.RtpTransceiverInit mapToRtpTransceiverInit(Map parameters) { @@ -798,6 +815,8 @@ public void addTransceiver(MediaStreamTrack track, Map transceiv transceiver = peerConnection.addTransceiver(track); } transceivers.put(transceiver.getMid(), transceiver); + senders.put(transceiver.getSender().id(), transceiver.getSender()); + receivers.put(transceiver.getReceiver().id(), transceiver.getReceiver()); result.success(transceiverToMap(transceiver)); } @@ -809,6 +828,8 @@ public void addTransceiverOfType(String mediaType, Map transceiv transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); } transceivers.put(transceiver.getMid(), transceiver); + senders.put(transceiver.getSender().id(), transceiver.getSender()); + receivers.put(transceiver.getReceiver().id(), transceiver.getReceiver()); result.success(transceiverToMap(transceiver)); } @@ -850,6 +871,7 @@ public void rtpSenderSetParameters(String rtpSenderId, Map param return; } sender.setParameters(MapToRtpParameters(parameters)); + result.success(null); } public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { @@ -859,6 +881,7 @@ public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result return; } sender.setTrack(track, replace ); + result.success(null); } public void rtpSenderDispose(String rtpSenderId, Result result) { @@ -869,5 +892,6 @@ public void rtpSenderDispose(String rtpSenderId, Result result) { } sender.dispose(); senders.remove(rtpSenderId); + result.success(null); } } diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index ce13f329de..04f4e223ad 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -70,7 +70,10 @@ class _MyAppState extends State { print(state); } - void _onAddStream(MediaStream stream) {} + void _onAddStream(MediaStream stream) { + print('New stream: ' + stream.id); + _remoteRenderer.srcObject = stream; + } void _onRemoveStream(MediaStream stream) { _remoteRenderer.srcObject = null; @@ -149,6 +152,7 @@ class _MyAppState 
extends State { _localStream = await MediaDevices.getUserMedia(mediaConstraints); _localRenderer.srcObject = _localStream; + //await _peerConnection.addStream(_localStream); await _peerConnection.addTransceiver( track: _localStream.getAudioTracks()[0], @@ -156,15 +160,13 @@ class _MyAppState extends State { direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); - /* - await _peerConnection.addTransceiver( + // ignore: unused_local_variable + var transceiver = await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( - direction: TransceiverDirection.SendRecv, - streamIds: [_localStream.id]), + direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); - */ - + /* await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( @@ -174,25 +176,28 @@ class _MyAppState extends State { // for firefox order matters... first high resolution, then scaled resolutions... RTCRtpEncoding( rid: 'f', + numTemporalLayers: 3, ), RTCRtpEncoding( rid: 'h', + numTemporalLayers: 3, scaleResolutionDownBy: 2.0, ), RTCRtpEncoding( rid: 'q', + numTemporalLayers: 3, scaleResolutionDownBy: 4.0, ), ], )); + await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); - - //await _peerConnection.addStream(_localStream); + */ var description = await _peerConnection.createOffer(offerSdpConstraints); var sdp = description.sdp; print('sdp = $sdp'); @@ -200,6 +205,11 @@ class _MyAppState extends State { //change for loopback. description.type = 'answer'; await _peerConnection.setRemoteDescription(description); + /* + var stream = await MediaDevices.getDisplayMedia(mediaConstraints); + _localRenderer.srcObject = _localStream; + await transceiver.sender.replaceTrack(stream.getVideoTracks()[0]); + */ } catch (e) { print(e.toString()); } diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index c218bfbd2d..280267c45f 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -484,8 +484,8 @@ class RTCPeerConnection { if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], if (init != null) 'transceiverInit': init.toMap() }); - var transceiver = RTCRtpTransceiver.fromMap(response); - transceiver.peerConnectionId = _peerConnectionId; + var transceiver = RTCRtpTransceiver.fromMap(response, + peerConnectionId: _peerConnectionId); _transceivers.add(transceiver); return transceiver; } on PlatformException catch (e) { diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/rtc_rtp_sender.dart index ecd20f10b1..ea61790961 100644 --- a/lib/src/rtc_rtp_sender.dart +++ b/lib/src/rtc_rtp_sender.dart @@ -7,16 +7,18 @@ import 'rtc_rtp_parameters.dart'; import 'utils.dart'; class RTCRtpSender { - RTCRtpSender( - this._id, this._track, this._dtmf, this._parameters, this._ownsTrack); + RTCRtpSender(this._id, this._track, this._dtmf, this._parameters, + this._ownsTrack, this._peerConnectionId); - factory RTCRtpSender.fromMap(Map map) { + factory RTCRtpSender.fromMap(Map map, + {String peerConnectionId}) { return RTCRtpSender( map['senderId'], MediaStreamTrack.fromMap(map['track']), - RTCDTMFSender(map['peerConnectionId']), + RTCDTMFSender(peerConnectionId), RTCRtpParameters.fromMap(map['rtpParameters']), - map['ownsTrack']); + map['ownsTrack'], + peerConnectionId); } final 
MethodChannel _channel = WebRTC.methodChannel(); diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/rtc_rtp_transceiver.dart index a32b8a8b9e..37794f0291 100644 --- a/lib/src/rtc_rtp_transceiver.dart +++ b/lib/src/rtc_rtp_transceiver.dart @@ -59,16 +59,18 @@ class RTCRtpTransceiverInit { } class RTCRtpTransceiver { - RTCRtpTransceiver( - this._id, this._direction, this._mid, this._sender, this._receiver); + RTCRtpTransceiver(this._id, this._direction, this._mid, this._sender, + this._receiver, _peerConnectionId); - factory RTCRtpTransceiver.fromMap(Map map) { + factory RTCRtpTransceiver.fromMap(Map map, + {String peerConnectionId}) { var transceiver = RTCRtpTransceiver( map['transceiverId'], typeStringToRtpTransceiverDirection[map['direction']], map['mid'], - RTCRtpSender.fromMap(map['sender']), - RTCRtpReceiver.fromMap(map['receiver'])); + RTCRtpSender.fromMap(map['sender'], peerConnectionId: peerConnectionId), + RTCRtpReceiver.fromMap(map['receiver']), + peerConnectionId); return transceiver; } diff --git a/lib/src/rtc_track_event.dart b/lib/src/rtc_track_event.dart index e190377c53..2ef76c16ac 100644 --- a/lib/src/rtc_track_event.dart +++ b/lib/src/rtc_track_event.dart @@ -5,14 +5,16 @@ import 'rtc_rtp_transceiver.dart'; class RTCTrackEvent { RTCTrackEvent({this.receiver, this.streams, this.track, this.transceiver}); - factory RTCTrackEvent.fromMap(Map map) { + factory RTCTrackEvent.fromMap( + Map map, String peerConnectionId) { var streamsParams = map['streams'] as List>; var streams = streamsParams.map((e) => MediaStream.fromMap(e)).toList(); return RTCTrackEvent( receiver: RTCRtpReceiver.fromMap(map['receiver']), streams: streams, track: MediaStreamTrack.fromMap(map['track']), - transceiver: RTCRtpTransceiver.fromMap(map['transceiver'])); + transceiver: RTCRtpTransceiver.fromMap(map['transceiver'], + peerConnectionId: peerConnectionId)); } final RTCRtpReceiver receiver; final List streams; diff --git a/lib/src/rtc_video_view.dart b/lib/src/rtc_video_view.dart index 2f5efb1f04..63367715f6 100644 --- a/lib/src/rtc_video_view.dart +++ b/lib/src/rtc_video_view.dart @@ -72,17 +72,20 @@ class RTCVideoRenderer extends ValueNotifier { set srcObject(MediaStream stream) { if (textureId == null) throw 'Call initialize before setting the stream'; - _srcObject = stream; - _channel.invokeMethod('videoRendererSetSrcObject', { - 'textureId': textureId, - 'streamId': stream?.id ?? '', - 'ownerTag': stream?.ownerTag ?? '' - }).then((_) { - value = (stream == null) - ? RTCVideoValue.empty - : value.copyWith(renderVideo: renderVideo); - }); + try { + _channel.invokeMethod('videoRendererSetSrcObject', { + 'textureId': textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }); + } catch (e) { + print(e.toString()); + } } @override From 3a36cd42cb3befdcb24f04909d7717310e3216cf Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 12 Oct 2020 15:13:33 +0800 Subject: [PATCH 14/26] More. 
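This change adds MediaStream.getTracks(), lets RTCPeerConnection.addTrack take the owning MediaStream objects (their ids are sent to the platform side as streamIds), and gives RTCRtpSender.setTrack a named takeOwnership parameter. A condensed sketch of the resulting Unified-Plan publish path, matching the updated loopback sample; the mediaConstraints map and the peer connection pc are assumed to be created elsewhere:

    import 'package:flutter_webrtc/flutter_webrtc.dart';

    // Sketch only: capture local media and publish every track with addTrack,
    // the Unified-Plan replacement for the old addStream call.
    Future<MediaStream> publishLocalMedia(
        RTCPeerConnection pc, Map<String, dynamic> mediaConstraints) async {
      final localStream = await MediaDevices.getUserMedia(mediaConstraints);
      for (final track in localStream.getTracks()) {
        // Each call returns the RTCRtpSender created for the track.
        await pc.addTrack(track, [localStream]);
      }
      return localStream;
    }
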
--- example/lib/src/loopback_sample.dart | 26 ++++++++++++++++++++++++-- lib/src/media_stream.dart | 4 ++++ lib/src/rtc_peerconnection.dart | 4 ++-- lib/src/rtc_rtp_sender.dart | 3 ++- 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index 04f4e223ad..17bba6e68d 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -152,8 +152,24 @@ class _MyAppState extends State { _localStream = await MediaDevices.getUserMedia(mediaConstraints); _localRenderer.srcObject = _localStream; - //await _peerConnection.addStream(_localStream); + /* old API + await _peerConnection.addStream(_localStream); + // or + var rtpSender = + await _peerConnection.createSender('audio', _localStream.id); + await rtpSender.setTrack(_localStream.getAudioTracks()[0]); + rtpSender = await _peerConnection.createSender('video', _localStream.id); + await rtpSender.setTrack(_localStream.getVideoTracks()[0]); + */ + + // Unified-Plan + _localStream.getTracks().forEach((track) { + _peerConnection.addTrack(track, [_localStream]); + }); + + // or + /* await _peerConnection.addTransceiver( track: _localStream.getAudioTracks()[0], init: RTCRtpTransceiverInit( @@ -166,7 +182,10 @@ class _MyAppState extends State { init: RTCRtpTransceiverInit( direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); + */ + /* + // Unified-Plan Simulcast await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( @@ -205,10 +224,13 @@ class _MyAppState extends State { //change for loopback. description.type = 'answer'; await _peerConnection.setRemoteDescription(description); - /* + + /* Unfied-Plan replaceTrack var stream = await MediaDevices.getDisplayMedia(mediaConstraints); _localRenderer.srcObject = _localStream; await transceiver.sender.replaceTrack(stream.getVideoTracks()[0]); + // do re-negotiation .... 
+ */ } catch (e) { print(e.toString()); diff --git a/lib/src/media_stream.dart b/lib/src/media_stream.dart index 6c8c8ff540..ebf1eda962 100644 --- a/lib/src/media_stream.dart +++ b/lib/src/media_stream.dart @@ -70,6 +70,10 @@ class MediaStream { } } + List getTracks() { + return [..._audioTracks, ..._videoTracks]; + } + List getAudioTracks() { return _audioTracks; } diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index 280267c45f..ff21dc2beb 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -422,13 +422,13 @@ class RTCPeerConnection { } Future addTrack(MediaStreamTrack track, - [List streamIds]) async { + [List streams]) async { try { final response = await _channel.invokeMethod('addTrack', { 'peerConnectionId': _peerConnectionId, 'trackId': track.id, - 'streamIds': streamIds + 'streamIds': streams.map((e) => e.id).toList() }); var sender = RTCRtpSender.fromMap(response); _senders.add(sender); diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/rtc_rtp_sender.dart index ea61790961..be1f741dfb 100644 --- a/lib/src/rtc_rtp_sender.dart +++ b/lib/src/rtc_rtp_sender.dart @@ -60,7 +60,8 @@ class RTCRtpSender { } } - Future setTrack(MediaStreamTrack track, bool takeOwnership) async { + Future setTrack(MediaStreamTrack track, + {bool takeOwnership = true}) async { try { await _channel.invokeMethod('rtpSenderSetTrack', { 'peerConnectionId': _peerConnectionId, From 490be788cf36297e2080f8fced9d55b201da368f Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 00:38:18 +0800 Subject: [PATCH 15/26] Add support for iOS. --- .../webrtc/PeerConnectionObserver.java | 128 ++-- ios/Classes/FlutterRTCPeerConnection.m | 76 ++- ios/Classes/FlutterWebRTCPlugin.h | 3 + ios/Classes/FlutterWebRTCPlugin.m | 600 +++++++++++++++++- lib/src/rtc_ice_candidate.dart | 4 + lib/src/rtc_peerconnection.dart | 26 + 6 files changed, 754 insertions(+), 83 deletions(-) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index f60d01e947..cd03ae0b45 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -17,6 +17,7 @@ import java.util.Map; import java.util.List; import org.webrtc.AudioTrack; +import org.webrtc.CandidatePairChangeEvent; import org.webrtc.DataChannel; import org.webrtc.DtmfSender; import org.webrtc.IceCandidate; @@ -39,9 +40,6 @@ class PeerConnectionObserver implements PeerConnection.Observer, EventChannel.St private PeerConnection peerConnection; final Map remoteStreams = new HashMap<>(); final Map remoteTracks = new HashMap<>(); - final Map transceivers = new HashMap(); - final Map senders = new HashMap(); - final Map receivers = new HashMap(); private final StateProvider stateProvider; private final EventChannel eventChannel; private EventChannel.EventSink eventSink; @@ -88,7 +86,6 @@ void close() { void dispose() { this.close(); - peerConnection.dispose(); eventChannel.setStreamHandler(null); } @@ -134,25 +131,55 @@ void createDataChannel(String label, ConstraintsMap config, Result result) { } } - void dataChannelClose(int dataChannelId) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - dataChannel.close(); - dataChannels.remove(dataChannelId); - } else { - Log.d(TAG, "dataChannelClose() dataChannel is null"); + void dataChannelClose(int dataChannelId) { + DataChannel 
dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + dataChannel.close(); + dataChannels.remove(dataChannelId); + } else { + Log.d(TAG, "dataChannelClose() dataChannel is null"); + } } - } - void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { - DataChannel dataChannel = dataChannels.get(dataChannelId); - if (dataChannel != null) { - DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); - dataChannel.send(buffer); - } else { - Log.d(TAG, "dataChannelSend() dataChannel is null"); + void dataChannelSend(int dataChannelId, ByteBuffer byteBuffer, Boolean isBinary) { + DataChannel dataChannel = dataChannels.get(dataChannelId); + if (dataChannel != null) { + DataChannel.Buffer buffer = new DataChannel.Buffer(byteBuffer, isBinary); + dataChannel.send(buffer); + } else { + Log.d(TAG, "dataChannelSend() dataChannel is null"); + } + } + + RtpTransceiver getRtpTransceiverById(String id) { + List transceivers = peerConnection.getTransceivers(); + for(RtpTransceiver transceiver : transceivers) { + if (id == transceiver.getMid()){ + return transceiver; + } + } + return null; + } + + RtpSender getRtpSenderById(String id) { + List senders = peerConnection.getSenders(); + for(RtpSender sender : senders) { + if (id == sender.id()){ + return sender; + } + } + return null; + } + + RtpReceiver getRtpReceiverById(String id) { + List receivers = peerConnection.getReceivers(); + for(RtpReceiver receiver : receivers) { + if (id == receiver.id()){ + return receiver; + } + } + return null; } - } void getStats(String trackId, final Result result) { MediaStreamTrack track = null; @@ -204,14 +231,24 @@ public void onIceCandidate(final IceCandidate candidate) { Log.d(TAG, "onIceCandidate"); ConstraintsMap params = new ConstraintsMap(); params.putString("event", "onCandidate"); - ConstraintsMap candidateParams = new ConstraintsMap(); - candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); - candidateParams.putString("sdpMid", candidate.sdpMid); - candidateParams.putString("candidate", candidate.sdp); - params.putMap("candidate", candidateParams.toMap()); + params.putMap("candidate", candidateToMap(candidate)); sendEvent(params); } + @Override + public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { + Log.d(TAG, "onSelectedCandidatePairChanged"); + ConstraintsMap params = new ConstraintsMap(); + params.putString("event", "onSelectedCandidatePairChanged"); + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("lastDataReceivedMs", event.lastDataReceivedMs); + candidateParams.putMap("local", candidateToMap(event.local)); + candidateParams.putMap("remote", candidateToMap(event.remote)); + candidateParams.putString("reason", event.reason); + params.putMap("candidate", candidateParams.toMap()); + sendEvent(params); + } + @Override public void onIceCandidatesRemoved(final IceCandidate[] candidates) { Log.d(TAG, "onIceCandidatesRemoved"); @@ -588,8 +625,12 @@ private RtpParameters.Encoding mapToEncoding(Map parameters) { encoding.minBitrateBps = (Integer) parameters.get("minBitrateBps"); } - if( parameters.get("minBitrateBps") != null) { - encoding.minBitrateBps = (Integer) parameters.get("minBitrateBps"); + if( parameters.get("maxBitrateBps") != null) { + encoding.maxBitrateBps = (Integer) parameters.get("maxBitrateBps"); + } + + if( parameters.get("maxFramerate") != null) { + encoding.maxFramerate = (Integer) parameters.get("maxFramerate"); } if( parameters.get("numTemporalLayers") != null) { 
@@ -775,14 +816,21 @@ Map transceiverToMap(RtpTransceiver transceiver){ return info.toMap(); } + Map candidateToMap(IceCandidate candidate) { + ConstraintsMap candidateParams = new ConstraintsMap(); + candidateParams.putInt("sdpMLineIndex", candidate.sdpMLineIndex); + candidateParams.putString("sdpMid", candidate.sdpMid); + candidateParams.putString("candidate", candidate.sdp); + return candidateParams.toMap(); + } + public void createSender(String kind, String streamId, Result result){ RtpSender sender = peerConnection.createSender(kind, streamId); - senders.put(sender.id(),sender); result.success(rtpSenderToMap(sender)); } public void closeSender(String senderId, Result result) { - RtpSender sender = senders.get(senderId); + RtpSender sender = getRtpSenderById(senderId); sender.dispose(); Map params = new HashMap<>(); params.put("result", true); @@ -791,12 +839,11 @@ public void closeSender(String senderId, Result result) { public void addTrack(MediaStreamTrack track, List streamIds, Result result){ RtpSender sender = peerConnection.addTrack(track, streamIds); - senders.put(sender.id(),sender); result.success(rtpSenderToMap(sender)); } public void removeTrack(String senderId, Result result){ - RtpSender sender = senders.get(senderId); + RtpSender sender = getRtpSenderById(senderId); if(sender == null){ resultError("removeTrack", "sender is null", result); return; @@ -814,9 +861,6 @@ public void addTransceiver(MediaStreamTrack track, Map transceiv } else { transceiver = peerConnection.addTransceiver(track); } - transceivers.put(transceiver.getMid(), transceiver); - senders.put(transceiver.getSender().id(), transceiver.getSender()); - receivers.put(transceiver.getReceiver().id(), transceiver.getReceiver()); result.success(transceiverToMap(transceiver)); } @@ -827,14 +871,11 @@ public void addTransceiverOfType(String mediaType, Map transceiv } else { transceiver = peerConnection.addTransceiver(stringToMediaType(mediaType)); } - transceivers.put(transceiver.getMid(), transceiver); - senders.put(transceiver.getSender().id(), transceiver.getSender()); - receivers.put(transceiver.getReceiver().id(), transceiver.getReceiver()); result.success(transceiverToMap(transceiver)); } public void rtpTransceiverSetDirection(String direction, String transceiverId, Result result) { - RtpTransceiver transceiver = transceivers.get(transceiverId); + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); if (transceiver == null) { resultError("rtpTransceiverSetDirection", "transceiver is null", result); return; @@ -844,7 +885,7 @@ public void rtpTransceiverSetDirection(String direction, String transceiverId, R } public void rtpTransceiverGetCurrentDirection(String transceiverId, Result result) { - RtpTransceiver transceiver = transceivers.get(transceiverId); + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); if (transceiver == null) { resultError("rtpTransceiverGetCurrentDirection", "transceiver is null", result); return; @@ -855,7 +896,7 @@ public void rtpTransceiverGetCurrentDirection(String transceiverId, Result resul } public void rtpTransceiverStop(String transceiverId, Result result) { - RtpTransceiver transceiver = transceivers.get(transceiverId); + RtpTransceiver transceiver = getRtpTransceiverById(transceiverId); if (transceiver == null) { resultError("rtpTransceiverStop", "transceiver is null", result); return; @@ -865,7 +906,7 @@ public void rtpTransceiverStop(String transceiverId, Result result) { } public void rtpSenderSetParameters(String rtpSenderId, Map parameters, 
Result result) { - RtpSender sender = senders.get(rtpSenderId); + RtpSender sender = getRtpSenderById(rtpSenderId); if (sender == null) { resultError("rtpSenderSetParameters", "sender is null", result); return; @@ -875,7 +916,7 @@ public void rtpSenderSetParameters(String rtpSenderId, Map param } public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result result, boolean replace) { - RtpSender sender = senders.get(rtpSenderId); + RtpSender sender = getRtpSenderById(rtpSenderId); if (sender == null) { resultError("rtpSenderSetTrack", "sender is null", result); return; @@ -885,13 +926,12 @@ public void rtpSenderSetTrack(String rtpSenderId, MediaStreamTrack track, Result } public void rtpSenderDispose(String rtpSenderId, Result result) { - RtpSender sender = senders.get(rtpSenderId); + RtpSender sender = getRtpSenderById(rtpSenderId); if (sender == null) { resultError("rtpSenderDispose", "sender is null", result); return; } sender.dispose(); - senders.remove(rtpSenderId); result.success(null); } } diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m index 6517f85ffe..a265d8e8a5 100755 --- a/ios/Classes/FlutterRTCPeerConnection.m +++ b/ios/Classes/FlutterRTCPeerConnection.m @@ -3,17 +3,7 @@ #import "FlutterRTCPeerConnection.h" #import "FlutterRTCDataChannel.h" -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import +#import @implementation RTCPeerConnection (Flutter) @@ -500,5 +490,69 @@ - (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RT } } +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didChangeConnectionState:(RTCPeerConnectionState)newState { + +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver { + +} + +/** Called when a receiver and its track are created. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didAddReceiver:(RTCRtpReceiver *)rtpReceiver + streams:(NSArray *)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for(RTCMediaStream *stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event": @"onTrack", + @"track": [self mediaTrackToMap:rtpReceiver.track], + @"receiver": [self receiverToMap:rtpReceiver], + @"streams": streams, + }); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver { + +} + +/** Called when the selected ICE candidate pair is changed. 
*/ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didChangeLocalCandidate:(RTCIceCandidate *)local + remoteCandidate:(RTCIceCandidate *)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString *)reason { + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate": local.sdp, + @"sdpMLineIndex": @(local.sdpMLineIndex), + @"sdpMid": local.sdpMid + }, + @"remote" : @{ + @"candidate": remote.sdp, + @"sdpMLineIndex": @(remote.sdpMLineIndex), + @"sdpMid": remote.sdpMid + }, + @"reason": reason, + @"lastDataReceivedMs": @(lastDataReceivedMs) + }); + } +} + @end diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h index 3be67f3518..acd1f44534 100644 --- a/ios/Classes/FlutterWebRTCPlugin.h +++ b/ios/Classes/FlutterWebRTCPlugin.h @@ -26,5 +26,8 @@ @property (nonatomic) int _targetFps; - (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag; +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track; +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver; @end diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m index c515f3b381..cb5c475289 100644 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -7,8 +7,6 @@ #import #import - - @implementation FlutterWebRTCPlugin { FlutterMethodChannel *_methodChannel; id _registry; @@ -556,18 +554,305 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result } } else if ([@"setConfiguration" isEqualToString:call.method]){ NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] message:[NSString stringWithFormat:@"Error: peerConnection not found!"] details:nil]); + } + } else if ([@"createSender" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* kind = argsMap[@"kind"]; + NSString* streamId = argsMap[@"streamId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection senderWithKind:kind streamId:streamId]; + result([self rtpSenderToMap:sender]); + } else if ([@"closeSender" isEqualToString:call.method]){ + NSDictionary* argsMap = 
call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + + if(![peerConnection removeTrack:sender]) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't close sender!"] + details:nil]); + return; + } + + result(nil); + } else if ([@"addTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [peerConnection removeTrack:sender]; + result(nil); + } else if ([@"addTransceiver" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* mediaType = argsMap[@"mediaType"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transceiver = nil; + + if(trackId != nil) { + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = 
[self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverWithTrack:track init:init]; + } else { + transceiver = [peerConnection addTransceiverWithTrack:track]; } - } else { + } else if (mediaType != nil) { + RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; + } else { + transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; + } + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: Incomplete parameters!"] + details:nil]); + return; + } + + if (transceiver == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] + details:nil]); + return; + } + + result([self transceiverToMap:transceiver]); + } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* direction = argsMap[@"direction"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + transcevier.direction = [self stringToTransceiverDirection:direction]; + result(nil); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + result(@{@"result": [self transceiverDirectionString:transcevier.direction]}); + } else if ([@"rtpTransceiverStop" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self 
getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + [transcevier stop]; + result(nil); + } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSDictionary* parameters = argsMap[@"parameters"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setParameters:[self mapToRtpParameters:parameters]]; + + result(nil); + } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } 
else if ([@"rtpSenderDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [peerConnection removeTrack:sender]; + result(nil); + } else { result(FlutterMethodNotImplemented); } } @@ -628,27 +913,26 @@ -(void)mediaStreamGetTracks:(NSString*)streamId } } -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId { + RTCMediaStream *stream = nil; + if (peerConnectionId.length > 0) { + RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; + stream = peerConnection.remoteStreams[streamId]; + } else { + for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { + stream = peerConnection.remoteStreams[streamId]; + if (stream) { + break; } - } + } + } + if (!stream) { + stream = _localStreams[streamId]; } return stream; } -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ +- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId { RTCMediaStreamTrack *track = _localTracks[trackId]; if (!track) { for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { @@ -658,10 +942,11 @@ - (RTCMediaStreamTrack*)trackForId:(NSString*)trackId } } } - return track; } + + - (RTCIceServer *)RTCIceServer:(id)json { if (!json) { @@ -822,4 +1107,263 @@ - (CGRect)parseRect:(NSDictionary *)rect { [[rect valueForKey:@"height"] doubleValue]); } +- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { + return @{ + @"dtmfSenderId": Id, + @"interToneGap": @(dtmf.interToneGap / 1000.0), + @"duration": @(dtmf.duration / 1000.0), + }; +} + +- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { + NSDictionary *rtcp = @{ + @"cname": parameters.rtcp.cname, + @"reducedSize": @(parameters.rtcp.isReducedSize), + }; + + NSMutableArray *headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri": headerExtension.uri, + @"encrypted": @(headerExtension.encrypted), + @"id": @(headerExtension.id), + }]; + } + + NSMutableArray *encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + [encodings addObject:@{ + @"active": @(encoding.isActive), + @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : @(0), + @"maxBitrateBps": encoding.maxBitrateBps? 
encoding.maxBitrateBps : @(0), + @"maxFramerate": encoding.maxFramerate? encoding.maxFramerate : @(30), + @"numTemporalLayers": encoding.numTemporalLayers? encoding.numTemporalLayers : @(1), + @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? encoding.scaleResolutionDownBy : @(1.0), + @"ssrc": encoding.ssrc ? encoding.ssrc : 0, + @"networkPriority": encoding.networkPriority? @(encoding.networkPriority) : @(0.0) + }]; + } + + NSMutableArray *codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name": codec.name, + @"payloadType": @(codec.payloadType), + @"clockRate": codec.clockRate, + @"numChannels": codec.numChannels? codec.numChannels : @(1), + @"parameters": codec.parameters, + @"kind": codec.kind + }]; + } + + return @{ + @"transactionId": parameters.transactionId, + @"rtcp": rtcp, + @"headerExtensions": headerExtensions, + @"encodings": encodings, + @"codecs": codecs + }; +} + +-(NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId": stream.streamId, + @"ownerTag": ownerTag, + @"audioTracks": audioTracks, + @"videoTracks":videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if(track == nil) + return @{}; + NSDictionary *params = @{ + @"enabled": @(track.isEnabled), + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"readyState": [self streamTrackStateToString:track.readyState], + @"remote": @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender *)sender { + NSDictionary *params = @{ + @"senderId": sender.senderId, + @"ownsTrack": @(YES), + @"rtpParameters": [self rtpParametersToMap:sender.parameters], + @"track": [self mediaTrackToMap:sender.track], + @"dtmfSender": [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +-(NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary *params = @{ + @"receiverId": receiver.receiverId, + @"rtpParameters": [self rtpParametersToMap:receiver.parameters], + @"track": [self mediaTrackToMap:receiver.track], + }; + return params; +} + +-(RTCRtpTransceiver*) getRtpTransceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if([transceiver.mid isEqualToString:Id]){ + return transceiver; + } + } + return nil; +} + +-(RTCRtpSender*) getRtpSnderById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpSender* sender in peerConnection.senders) { + if([sender.senderId isEqualToString:Id]){ + return sender; + } + } + return nil; +} + +-(RTCRtpReceiver*) getRtpReceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpReceiver* receiver in peerConnection.receivers) { + if([receiver.receiverId isEqualToString:Id]){ + return receiver; + } + } + return nil; +} + 
+-(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; + [encoding setRid:map[@"rid"]]; + + if(map[@"active"] != nil) { + [encoding setIsActive:map[@"active"]]; + } + + if(map[@"minBitrateBps"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrateBps"]]; + } + + if(map[@"maxBitrateBps"] != nil) { + [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrateBps"]]; + } + + if(map[@"maxFramerate"] != nil) { + [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if(map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if(map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + return encoding; +} + +-(RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; + NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [RTCRtpTransceiverInit alloc]; + init.direction = [self stringToTransceiverDirection:direction]; + init.streamIds = streamIds; + + if(encodingsParams != nil) { + NSMutableArray *sendEncodings = [NSMutableArray array]; + for (NSDictionary* map in encodingsParams){ + [sendEncodings addObject:[self mapToEncoding:map]]; + } + init.sendEncodings = sendEncodings; + } + return init; +} + +-(RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +-(RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if([type isEqualToString:@"sendonly"]){ + return RTCRtpTransceiverDirectionSendOnly; + } else if([type isEqualToString: @"recvonly"]){ + return RTCRtpTransceiverDirectionRecvOnly; + } else if([type isEqualToString: @"inactive"]){ + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +-(RTCRtpParameters *)mapToRtpParameters:(NSDictionary *)map { + //TODO: + return nil; +} + +-(NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + } + return nil; +} + +-(NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid? 
transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId": mid, + @"mid": mid, + @"direction": [self transceiverDirectionString:transceiver.direction], + @"sender": [self rtpSenderToMap:transceiver.sender], + @"receiver": [self receiverToMap:transceiver.receiver] + }; + return params; +} + @end diff --git a/lib/src/rtc_ice_candidate.dart b/lib/src/rtc_ice_candidate.dart index d9ba2d9606..7357833c26 100644 --- a/lib/src/rtc_ice_candidate.dart +++ b/lib/src/rtc_ice_candidate.dart @@ -1,5 +1,9 @@ class RTCIceCandidate { RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); + factory RTCIceCandidate.fromMap(Map map) { + return RTCIceCandidate( + map['candidate'], map['sdpMid'], map['sdpMLineIndex']); + } final String candidate; final String sdpMid; final int sdpMlineIndex; diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index ff21dc2beb..b2b67f2a11 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -206,6 +206,32 @@ class RTCPeerConnection { track: MediaStreamTrack.fromMap(map['track']), streams: streams)); break; + + /// Other + case 'onSelectedCandidatePairChanged': + + /// class RTCIceCandidatePair { + /// RTCIceCandidatePair(this.local, this.remote, this.lastReceivedMs, this.reason); + /// factory RTCIceCandidatePair.fromMap(Map map) { + /// return RTCIceCandidatePair( + /// RTCIceCandidate.fromMap(map['local']), + /// RTCIceCandidate.fromMap(map['remote']), + /// map['lastReceivedMs'], + /// map['reason']); + /// } + /// RTCIceCandidate local; + /// RTCIceCandidate remote; + /// int lastReceivedMs; + /// String reason; + /// } + /// + /// typedef SelectedCandidatePairChangedCallback = void Function(RTCIceCandidatePair pair); + /// SelectedCandidatePairChangedCallback onSelectedCandidatePairChanged; + /// + /// RTCIceCandidatePair iceCandidatePair = RTCIceCandidatePair.fromMap(map); + /// onSelectedCandidatePairChanged?.call(iceCandidatePair); + + break; } } From 587f10fc1f051f6c7d7daf58d14dd4070cebe8a2 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 00:54:56 +0800 Subject: [PATCH 16/26] Optimized code. --- ios/Classes/FlutterRTCVideoRenderer.h | 2 +- ios/Classes/FlutterRTCVideoRenderer.m | 18 ++---------------- ios/Classes/FlutterWebRTCPlugin.m | 23 ++++++++++++++++++++--- 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h index d1bd7b8675..96dcd2203d 100755 --- a/ios/Classes/FlutterRTCVideoRenderer.h +++ b/ios/Classes/FlutterRTCVideoRenderer.h @@ -28,6 +28,6 @@ - (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry messenger:(NSObject*)messenger; --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; +-(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; @end diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m index 1f112b24ae..234849ee2c 100755 --- a/ios/Classes/FlutterRTCVideoRenderer.m +++ b/ios/Classes/FlutterRTCVideoRenderer.m @@ -261,22 +261,8 @@ - (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id Date: Tue, 13 Oct 2020 02:32:15 +0800 Subject: [PATCH 17/26] Fix simulcast for iOS. 
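
This patch wires Unified-Plan simulcast through on iOS: the loopback example
now sends three encodings (rid 'f', 'h' and 'q') and the native side copies
the send encodings into the RTCRtpTransceiverInit. As a rough Dart sketch of
the API exercised below (pc and localStream are placeholder names for an
existing RTCPeerConnection and a camera MediaStream, not part of this patch):

    await pc.addTransceiver(
      track: localStream.getVideoTracks()[0],
      init: RTCRtpTransceiverInit(
        direction: TransceiverDirection.SendRecv,
        streams: [localStream],
        sendEncodings: [
          // For Firefox the order matters: full resolution first,
          // then the scaled-down layers.
          RTCRtpEncoding(rid: 'f', maxBitrateBps: 900000, numTemporalLayers: 3),
          RTCRtpEncoding(
              rid: 'h',
              maxBitrateBps: 300000,
              numTemporalLayers: 3,
              scaleResolutionDownBy: 2.0),
          RTCRtpEncoding(
              rid: 'q',
              maxBitrateBps: 100000,
              numTemporalLayers: 3,
              scaleResolutionDownBy: 4.0),
        ],
      ),
    );

The remote end can answer with plain recvonly video transceivers, as the
loopback sample does further down.
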
--- example/lib/src/loopback_sample.dart | 18 ++++++++++-------- ios/Classes/FlutterWebRTCPlugin.m | 23 ++++++++++++----------- ios/flutter_webrtc.podspec | 2 +- 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index 17bba6e68d..e52f65f8ac 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -72,7 +72,7 @@ class _MyAppState extends State { void _onAddStream(MediaStream stream) { print('New stream: ' + stream.id); - _remoteRenderer.srcObject = stream; + //_remoteRenderer.srcObject = stream; } void _onRemoveStream(MediaStream stream) { @@ -162,20 +162,20 @@ class _MyAppState extends State { rtpSender = await _peerConnection.createSender('video', _localStream.id); await rtpSender.setTrack(_localStream.getVideoTracks()[0]); */ - + /* // Unified-Plan _localStream.getTracks().forEach((track) { _peerConnection.addTrack(track, [_localStream]); }); - + */ // or - /* + await _peerConnection.addTransceiver( track: _localStream.getAudioTracks()[0], init: RTCRtpTransceiverInit( direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); - + /* // ignore: unused_local_variable var transceiver = await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], @@ -184,32 +184,34 @@ class _MyAppState extends State { ); */ - /* // Unified-Plan Simulcast await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( - direction: TransceiverDirection.SendOnly, + direction: TransceiverDirection.SendRecv, streams: [_localStream], sendEncodings: [ // for firefox order matters... first high resolution, then scaled resolutions... RTCRtpEncoding( rid: 'f', + maxBitrateBps: 900000, numTemporalLayers: 3, ), RTCRtpEncoding( rid: 'h', numTemporalLayers: 3, + maxBitrateBps: 300000, scaleResolutionDownBy: 2.0, ), RTCRtpEncoding( rid: 'q', numTemporalLayers: 3, + maxBitrateBps: 100000, scaleResolutionDownBy: 4.0, ), ], )); - + /* await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); await _peerConnection.addTransceiver( diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m index 596fb45fa8..9bc41767c9 100644 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -47,7 +47,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel *)channel _speakerOn = NO; self.viewController = viewController; } - + //RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; @@ -730,7 +730,7 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result details:nil]); return; } - transcevier.direction = [self stringToTransceiverDirection:direction]; + [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; result(nil); } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]){ NSDictionary* argsMap = call.arguments; @@ -769,7 +769,7 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result details:nil]); return; } - [transcevier stop]; + [transcevier stopInternal]; result(nil); } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]){ NSDictionary* argsMap = call.arguments; @@ -1151,13 +1151,12 @@ - (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { for 
(RTCRtpEncodingParameters* encoding in parameters.encodings) { [encodings addObject:@{ @"active": @(encoding.isActive), - @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : @(0), - @"maxBitrateBps": encoding.maxBitrateBps? encoding.maxBitrateBps : @(0), + @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : [NSNumber numberWithInt:0], + @"maxBitrateBps": encoding.maxBitrateBps? encoding.maxBitrateBps : [NSNumber numberWithInt:0], @"maxFramerate": encoding.maxFramerate? encoding.maxFramerate : @(30), @"numTemporalLayers": encoding.numTemporalLayers? encoding.numTemporalLayers : @(1), - @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? encoding.scaleResolutionDownBy : @(1.0), - @"ssrc": encoding.ssrc ? encoding.ssrc : 0, - @"networkPriority": encoding.networkPriority? @(encoding.networkPriority) : @(0.0) + @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? @(encoding.scaleResolutionDownBy.doubleValue) : [NSNumber numberWithDouble:1.0], + @"ssrc": encoding.ssrc ? encoding.ssrc : [NSNumber numberWithLong:0] }]; } @@ -1281,6 +1280,8 @@ -(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { encoding.isActive = YES; encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; encoding.numTemporalLayers = [NSNumber numberWithInt:1]; + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; [encoding setRid:map[@"rid"]]; if(map[@"active"] != nil) { @@ -1319,11 +1320,11 @@ -(RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { init.streamIds = streamIds; if(encodingsParams != nil) { - NSMutableArray *sendEncodings = [NSMutableArray array]; + NSArray *sendEncodings = [[NSArray alloc] init]; for (NSDictionary* map in encodingsParams){ - [sendEncodings addObject:[self mapToEncoding:map]]; + sendEncodings = [sendEncodings arrayByAddingObject:[self mapToEncoding:map]]; } - init.sendEncodings = sendEncodings; + [init setSendEncodings:sendEncodings]; } return init; } diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec index aa19eea685..162c3e99e1 100644 --- a/ios/flutter_webrtc.podspec +++ b/ios/flutter_webrtc.podspec @@ -16,7 +16,7 @@ A new flutter plugin project. s.public_header_files = 'Classes/**/*.h' s.dependency 'Flutter' s.dependency 'Libyuv', '1703' - s.dependency 'GoogleWebRTC', '1.1.29400' + s.dependency 'GoogleWebRTC', '1.1.31999' s.ios.deployment_target = '10.0' s.static_framework = true end From 7464f92e746423d8f91a94319f457181d1ab9dbd Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 02:39:10 +0800 Subject: [PATCH 18/26] Update. --- example/lib/src/loopback_sample.dart | 10 ++++++---- ios/Classes/FlutterWebRTCPlugin.m | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index e52f65f8ac..87dcd4b55c 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -188,7 +188,7 @@ class _MyAppState extends State { await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( - direction: TransceiverDirection.SendRecv, + direction: TransceiverDirection.SendOnly, streams: [_localStream], sendEncodings: [ // for firefox order matters... first high resolution, then scaled resolutions... 
@@ -211,14 +211,16 @@ class _MyAppState extends State { ), ], )); - /* + await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); await _peerConnection.addTransceiver( kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); await _peerConnection.addTransceiver( - kind: RTCRtpMediaType.RTCRtpMediaTypeVideo); - */ + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + var description = await _peerConnection.createOffer(offerSdpConstraints); var sdp = description.sdp; print('sdp = $sdp'); diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m index 9bc41767c9..1dfd0af07b 100644 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -1285,7 +1285,7 @@ -(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { [encoding setRid:map[@"rid"]]; if(map[@"active"] != nil) { - [encoding setIsActive:map[@"active"]]; + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; } if(map[@"minBitrateBps"] != nil) { From 7bc2c5e0d9a82160a059edaca0f3665e362756fd Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 02:43:43 +0800 Subject: [PATCH 19/26] Upgrade GoogleWebRTC@android to 1.0.32006. --- android/build.gradle | 4 ++-- example/scripts/project_tools.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index b30a3d8f14..411a9bdda4 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -25,7 +25,7 @@ android { compileSdkVersion 28 defaultConfig { - minSdkVersion 18 + minSdkVersion 21 testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" consumerProguardFiles 'proguard-rules.pro' } @@ -41,6 +41,6 @@ android { } dependencies { - api 'org.webrtc:google-webrtc:1.0.30039' + api 'org.webrtc:google-webrtc:1.0.32006' implementation "androidx.annotation:annotation:1.1.0" } diff --git a/example/scripts/project_tools.sh b/example/scripts/project_tools.sh index 355266f7e5..130e561006 100755 --- a/example/scripts/project_tools.sh +++ b/example/scripts/project_tools.sh @@ -37,7 +37,7 @@ function add_permission_label() { echo "" echo "Add permission labels to AndroidManifest.xml." echo "" - python add-line.py -i ../android/app/build.gradle -s 'minSdkVersion 16' -t 'minSdkVersion 18' -r + python add-line.py -i ../android/app/build.gradle -s 'minSdkVersion 16' -t 'minSdkVersion 21' -r python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' python add-line.py -i ../android/app/src/main/AndroidManifest.xml -s "' From a6b639eae2b6bc1a0c0094ef02573aabfe7bc234 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 02:51:15 +0800 Subject: [PATCH 20/26] Update. 
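
Besides re-enabling the extra transceivers in the loopback sample, this change
binds each RTCDTMFSender to the RTCRtpSender it belongs to, so the platform
side can route tones via the new 'rtpSenderId' argument. A minimal Dart sketch
of the intended use (pc and audioTrack are placeholder names, and the
dtmfSender getter is assumed to be exposed on the sender as in the public API):

    var sender = await pc.addTrack(audioTrack);
    await sender.dtmfSender.sendDtmf('1234#', duration: 100, interToneGap: 70);

The older pc.createDtmfSender(track) path keeps working, but it falls back to
an empty sender id.
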
--- example/lib/src/loopback_sample.dart | 6 +++--- lib/src/rtc_dtmf_sender.dart | 4 +++- lib/src/rtc_peerconnection.dart | 2 +- lib/src/rtc_rtp_sender.dart | 2 +- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index 87dcd4b55c..b32b9c763e 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -175,15 +175,15 @@ class _MyAppState extends State { init: RTCRtpTransceiverInit( direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); - /* + // ignore: unused_local_variable var transceiver = await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], init: RTCRtpTransceiverInit( direction: TransceiverDirection.SendRecv, streams: [_localStream]), ); - */ + /* // Unified-Plan Simulcast await _peerConnection.addTransceiver( track: _localStream.getVideoTracks()[0], @@ -220,7 +220,7 @@ class _MyAppState extends State { kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); - + */ var description = await _peerConnection.createOffer(offerSdpConstraints); var sdp = description.sdp; print('sdp = $sdp'); diff --git a/lib/src/rtc_dtmf_sender.dart b/lib/src/rtc_dtmf_sender.dart index 2eec41fada..057071af84 100644 --- a/lib/src/rtc_dtmf_sender.dart +++ b/lib/src/rtc_dtmf_sender.dart @@ -3,8 +3,9 @@ import 'package:flutter/services.dart'; import 'utils.dart'; class RTCDTMFSender { - RTCDTMFSender(this._peerConnectionId); + RTCDTMFSender(this._peerConnectionId, this._rtpSenderId); // peer connection Id must be defined as a variable where this function will be called. + final String _rtpSenderId; final String _peerConnectionId; final MethodChannel _channel = WebRTC.methodChannel(); @@ -22,6 +23,7 @@ class RTCDTMFSender { {int duration = 100, int interToneGap = 70}) async { await _channel.invokeMethod('sendDtmf', { 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId, 'tone': tones, 'duration': duration, 'gap': interToneGap, diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/rtc_peerconnection.dart index b2b67f2a11..46dccd31ee 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/rtc_peerconnection.dart @@ -421,7 +421,7 @@ class RTCPeerConnection { } RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - return RTCDTMFSender(_peerConnectionId); + return RTCDTMFSender(_peerConnectionId, ''); } /// Unified-Plan. diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/rtc_rtp_sender.dart index be1f741dfb..f5d0524c7f 100644 --- a/lib/src/rtc_rtp_sender.dart +++ b/lib/src/rtc_rtp_sender.dart @@ -15,7 +15,7 @@ class RTCRtpSender { return RTCRtpSender( map['senderId'], MediaStreamTrack.fromMap(map['track']), - RTCDTMFSender(peerConnectionId), + RTCDTMFSender(peerConnectionId, map['senderId']), RTCRtpParameters.fromMap(map['rtpParameters']), map['ownsTrack'], peerConnectionId); From 7cd1c9cf67e4adcc4ece40d9256e898693702d67 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 15:49:47 +0800 Subject: [PATCH 21/26] Merge changes from master. 
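
This merge splits the library into platform-agnostic interfaces under
lib/src/interface plus native and web implementations, and moves media access
behind navigator.mediaDevices. A short Dart sketch of the updated call site
(the constraints map is only an illustrative example):

    final mediaConstraints = {'audio': true, 'video': true};
    var stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);

The old static helpers such as MediaDevices.getUserMedia remain as deprecated
shims that forward to the call above.
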
--- example/lib/src/loopback_sample.dart | 3 +- lib/flutter_webrtc.dart | 51 ++--- lib/src/{ => interface}/enums.dart | 50 ++++- lib/src/interface/factory.dart | 19 ++ lib/src/interface/media_recorder.dart | 22 ++ lib/src/interface/media_stream.dart | 34 ++++ lib/src/interface/media_stream_track.dart | 39 ++++ lib/src/interface/mediadevices.dart | 7 + lib/src/interface/navigator.dart | 9 + .../{web => interface}/rtc_data_channel.dart | 78 ++----- .../{web => interface}/rtc_dtmf_sender.dart | 12 +- .../{ => interface}/rtc_ice_candidate.dart | 4 - lib/src/interface/rtc_peerconnection.dart | 115 +++++++++++ .../{ => interface}/rtc_rtcp_parameters.dart | 0 .../{ => interface}/rtc_rtp_parameters.dart | 0 lib/src/interface/rtc_rtp_receiver.dart | 26 +++ lib/src/interface/rtc_rtp_sender.dart | 30 +++ lib/src/interface/rtc_rtp_transceiver.dart | 44 ++++ .../rtc_session_description.dart | 0 lib/src/interface/rtc_stats_report.dart | 7 + lib/src/interface/rtc_track_event.dart | 12 ++ lib/src/interface/rtc_video_renderer.dart | 66 ++++++ lib/src/media_devices.dart | 23 +++ lib/src/media_recorder.dart | 55 ++--- lib/src/native/factory_impl.dart | 68 +++++++ lib/src/native/media_recorder_impl.dart | 44 ++++ .../media_stream_impl.dart} | 51 +++-- .../media_stream_track_impl.dart} | 36 ++-- .../mediadevices_impl.dart} | 25 ++- lib/src/native/navigator_impl.dart | 24 +++ lib/src/native/rtc_data_channel_impl.dart | 103 ++++++++++ lib/src/native/rtc_dtmf_sender_impl.dart | 22 ++ .../native/rtc_peerconnection_factory.dart | 29 +++ .../rtc_peerconnection_impl.dart} | 159 ++++++++------- lib/src/native/rtc_rtp_receiver_impl.dart | 36 ++++ .../rtc_rtp_sender_impl.dart} | 33 ++- .../rtc_rtp_transceiver_impl.dart} | 93 +++++---- lib/src/native/rtc_track_event_impl.dart | 32 +++ lib/src/native/rtc_video_renderer_impl.dart | 92 +++++++++ lib/src/native/rtc_video_view_impl.dart | 67 ++++++ lib/src/{ => native}/utils.dart | 0 lib/src/rtc_data_channel.dart | 184 ----------------- lib/src/rtc_dtmf_sender.dart | 37 ---- lib/src/rtc_peerconnection_factory.dart | 38 ---- lib/src/rtc_rtp_receiver.dart | 54 ----- lib/src/rtc_stats_report.dart | 7 - lib/src/rtc_track_event.dart | 23 --- lib/src/rtc_video_renderer.dart | 28 +++ lib/src/rtc_video_view.dart | 181 ----------------- lib/src/web/factory_impl.dart | 59 ++++++ ...recorder.dart => media_recorder_impl.dart} | 19 +- lib/src/web/media_stream.dart | 47 ----- lib/src/web/media_stream_impl.dart | 58 ++++++ ...rack.dart => media_stream_track_impl.dart} | 42 +++- ...user_media.dart => mediadevices_impl.dart} | 21 +- lib/src/web/navigator_impl.dart | 24 +++ lib/src/web/rtc_data_channel_impl.dart | 76 +++++++ lib/src/web/rtc_dtmf_sender_impl.dart | 14 ++ lib/src/web/rtc_ice_candidate.dart | 22 -- lib/src/web/rtc_peerconnection_factory.dart | 40 ++-- ...tion.dart => rtc_peerconnection_impl.dart} | 191 ++++++++++++------ lib/src/web/rtc_session_description.dart | 16 -- ...view.dart => rtc_video_renderer_impl.dart} | 123 ++--------- lib/src/web/rtc_video_view_impl.dart | 58 ++++++ lib/src/web/utils.dart | 4 + 65 files changed, 1848 insertions(+), 1138 deletions(-) rename lib/src/{ => interface}/enums.dart (79%) create mode 100644 lib/src/interface/factory.dart create mode 100644 lib/src/interface/media_recorder.dart create mode 100644 lib/src/interface/media_stream.dart create mode 100644 lib/src/interface/media_stream_track.dart create mode 100644 lib/src/interface/mediadevices.dart create mode 100644 lib/src/interface/navigator.dart rename lib/src/{web => 
interface}/rtc_data_channel.dart (52%) rename lib/src/{web => interface}/rtc_dtmf_sender.dart (77%) rename lib/src/{ => interface}/rtc_ice_candidate.dart (65%) create mode 100644 lib/src/interface/rtc_peerconnection.dart rename lib/src/{ => interface}/rtc_rtcp_parameters.dart (100%) rename lib/src/{ => interface}/rtc_rtp_parameters.dart (100%) create mode 100644 lib/src/interface/rtc_rtp_receiver.dart create mode 100644 lib/src/interface/rtc_rtp_sender.dart create mode 100644 lib/src/interface/rtc_rtp_transceiver.dart rename lib/src/{ => interface}/rtc_session_description.dart (100%) create mode 100644 lib/src/interface/rtc_stats_report.dart create mode 100644 lib/src/interface/rtc_track_event.dart create mode 100644 lib/src/interface/rtc_video_renderer.dart create mode 100644 lib/src/media_devices.dart create mode 100644 lib/src/native/factory_impl.dart create mode 100644 lib/src/native/media_recorder_impl.dart rename lib/src/{media_stream.dart => native/media_stream_impl.dart} (64%) rename lib/src/{media_stream_track.dart => native/media_stream_track_impl.dart} (78%) rename lib/src/{get_user_media.dart => native/mediadevices_impl.dart} (77%) create mode 100644 lib/src/native/navigator_impl.dart create mode 100644 lib/src/native/rtc_data_channel_impl.dart create mode 100644 lib/src/native/rtc_dtmf_sender_impl.dart create mode 100644 lib/src/native/rtc_peerconnection_factory.dart rename lib/src/{rtc_peerconnection.dart => native/rtc_peerconnection_impl.dart} (84%) create mode 100644 lib/src/native/rtc_rtp_receiver_impl.dart rename lib/src/{rtc_rtp_sender.dart => native/rtc_rtp_sender_impl.dart} (76%) rename lib/src/{rtc_rtp_transceiver.dart => native/rtc_rtp_transceiver_impl.dart} (57%) create mode 100644 lib/src/native/rtc_track_event_impl.dart create mode 100644 lib/src/native/rtc_video_renderer_impl.dart create mode 100644 lib/src/native/rtc_video_view_impl.dart rename lib/src/{ => native}/utils.dart (100%) delete mode 100644 lib/src/rtc_data_channel.dart delete mode 100644 lib/src/rtc_dtmf_sender.dart delete mode 100644 lib/src/rtc_peerconnection_factory.dart delete mode 100644 lib/src/rtc_rtp_receiver.dart delete mode 100644 lib/src/rtc_stats_report.dart delete mode 100644 lib/src/rtc_track_event.dart create mode 100644 lib/src/rtc_video_renderer.dart delete mode 100644 lib/src/rtc_video_view.dart create mode 100644 lib/src/web/factory_impl.dart rename lib/src/web/{media_recorder.dart => media_recorder_impl.dart} (79%) delete mode 100644 lib/src/web/media_stream.dart create mode 100644 lib/src/web/media_stream_impl.dart rename lib/src/web/{media_stream_track.dart => media_stream_track_impl.dart} (71%) rename lib/src/web/{get_user_media.dart => mediadevices_impl.dart} (79%) create mode 100644 lib/src/web/navigator_impl.dart create mode 100644 lib/src/web/rtc_data_channel_impl.dart create mode 100644 lib/src/web/rtc_dtmf_sender_impl.dart delete mode 100644 lib/src/web/rtc_ice_candidate.dart rename lib/src/web/{rtc_peerconnection.dart => rtc_peerconnection_impl.dart} (58%) delete mode 100644 lib/src/web/rtc_session_description.dart rename lib/src/web/{rtc_video_view.dart => rtc_video_renderer_impl.dart} (60%) create mode 100644 lib/src/web/rtc_video_view_impl.dart diff --git a/example/lib/src/loopback_sample.dart b/example/lib/src/loopback_sample.dart index b32b9c763e..fe08b29279 100644 --- a/example/lib/src/loopback_sample.dart +++ b/example/lib/src/loopback_sample.dart @@ -150,7 +150,8 @@ class _MyAppState extends State { _peerConnection.onTrack = _onTrack; - _localStream = await 
MediaDevices.getUserMedia(mediaConstraints); + _localStream = + await navigator.mediaDevices.getUserMedia(mediaConstraints); _localRenderer.srcObject = _localStream; /* old API diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index 1bc2671725..fd5d1a3dc6 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -1,32 +1,25 @@ library flutter_webrtc; -export 'src/enums.dart'; -export 'src/get_user_media.dart' - if (dart.library.html) 'src/web/get_user_media.dart'; -export 'src/media_recorder.dart' - if (dart.library.html) 'src/web/media_recorder.dart'; -export 'src/media_stream.dart' - if (dart.library.html) 'src/web/media_stream.dart'; -export 'src/media_stream_track.dart' - if (dart.library.html) 'src/web/media_stream_track.dart'; -export 'src/rtc_data_channel.dart' - if (dart.library.html) 'src/web/rtc_data_channel.dart'; -export 'src/rtc_dtmf_sender.dart' - if (dart.library.html) 'src/web/rtc_dtmf_sender.dart'; -export 'src/rtc_ice_candidate.dart' - if (dart.library.html) 'src/web/rtc_ice_candidate.dart'; -export 'src/rtc_peerconnection.dart' - if (dart.library.html) 'src/web/rtc_peerconnection.dart'; -export 'src/rtc_peerconnection_factory.dart' +export 'src/interface/enums.dart'; +export 'src/interface/media_stream.dart'; +export 'src/interface/media_stream_track.dart'; +export 'src/interface/rtc_data_channel.dart'; +export 'src/interface/rtc_dtmf_sender.dart'; +export 'src/interface/rtc_ice_candidate.dart'; +export 'src/interface/rtc_peerconnection.dart'; +export 'src/interface/rtc_rtcp_parameters.dart'; +export 'src/interface/rtc_rtp_parameters.dart'; +export 'src/interface/rtc_rtp_receiver.dart'; +export 'src/interface/rtc_rtp_sender.dart'; +export 'src/interface/rtc_rtp_transceiver.dart'; +export 'src/interface/rtc_session_description.dart'; +export 'src/interface/rtc_stats_report.dart'; +export 'src/interface/rtc_track_event.dart'; +export 'src/media_devices.dart'; +export 'src/media_recorder.dart'; +export 'src/native/rtc_peerconnection_factory.dart' if (dart.library.html) 'src/web/rtc_peerconnection_factory.dart'; -export 'src/rtc_rtp_parameters.dart'; -export 'src/rtc_rtp_receiver.dart'; -export 'src/rtc_rtp_sender.dart'; -export 'src/rtc_rtp_transceiver.dart'; -export 'src/rtc_session_description.dart' - if (dart.library.html) 'src/web/rtc_session_description.dart'; -export 'src/rtc_stats_report.dart'; -export 'src/rtc_track_event.dart'; -export 'src/rtc_video_view.dart' - if (dart.library.html) 'src/web/rtc_video_view.dart'; -export 'src/utils.dart' if (dart.library.html) 'src/web/utils.dart'; +export 'src/native/rtc_video_view_impl.dart' + if (dart.library.html) 'src/web/rtc_video_view_impl.dart'; +export 'src/native/utils.dart' if (dart.library.html) 'src/web/utils.dart'; +export 'src/rtc_video_renderer.dart'; diff --git a/lib/src/enums.dart b/lib/src/interface/enums.dart similarity index 79% rename from lib/src/enums.dart rename to lib/src/interface/enums.dart index a6a13277a6..017345d606 100644 --- a/lib/src/enums.dart +++ b/lib/src/interface/enums.dart @@ -19,6 +19,12 @@ enum RTCSignalingState { RTCSignalingStateClosed } +enum RTCIceGatheringState { + RTCIceGatheringStateNew, + RTCIceGatheringStateGathering, + RTCIceGatheringStateComplete +} + enum RTCPeerConnectionState { RTCPeerConnectionStateClosed, RTCPeerConnectionStateFailed, @@ -28,12 +34,6 @@ enum RTCPeerConnectionState { RTCPeerConnectionStateConnected } -enum RTCIceGatheringState { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete -} - enum 
RTCIceConnectionState { RTCIceConnectionStateNew, RTCIceConnectionStateChecking, @@ -49,6 +49,44 @@ enum RTCVideoViewObjectFit { RTCVideoViewObjectFitContain, RTCVideoViewObjectFitCover, } +enum RTCRtpMediaType { + RTCRtpMediaTypeAudio, + RTCRtpMediaTypeVideo, + RTCRtpMediaTypeData, +} + +final typeRTCRtpMediaTypetoString = { + RTCRtpMediaType.RTCRtpMediaTypeAudio: 'audio', + RTCRtpMediaType.RTCRtpMediaTypeVideo: 'video', + RTCRtpMediaType.RTCRtpMediaTypeData: 'data', +}; + +final typeStringToRTCRtpMediaType = { + 'audio': RTCRtpMediaType.RTCRtpMediaTypeAudio, + 'video': RTCRtpMediaType.RTCRtpMediaTypeVideo, + 'data': RTCRtpMediaType.RTCRtpMediaTypeData, +}; + +enum TransceiverDirection { + SendRecv, + SendOnly, + RecvOnly, + Inactive, +} + +final typeStringToRtpTransceiverDirection = { + 'sendrecv': TransceiverDirection.SendRecv, + 'sendonly': TransceiverDirection.SendOnly, + 'recvonly': TransceiverDirection.RecvOnly, + 'inactive': TransceiverDirection.Inactive, +}; + +final typeRtpTransceiverDirectionToString = { + TransceiverDirection.SendRecv: 'sendrecv', + TransceiverDirection.SendOnly: 'sendonly', + TransceiverDirection.RecvOnly: 'recvonly', + TransceiverDirection.Inactive: 'inactive', +}; RTCIceConnectionState iceConnectionStateForString(String state) { switch (state) { diff --git a/lib/src/interface/factory.dart b/lib/src/interface/factory.dart new file mode 100644 index 0000000000..876e5a6609 --- /dev/null +++ b/lib/src/interface/factory.dart @@ -0,0 +1,19 @@ +import 'media_recorder.dart'; +import 'media_stream.dart'; +import 'navigator.dart'; +import 'rtc_peerconnection.dart'; +import 'rtc_video_renderer.dart'; + +abstract class RTCFactory { + Future createPeerConnection( + Map configuration, + [Map constraints]); + + Future createLocalMediaStream(String label); + + Navigator get navigator; + + MediaRecorder mediaRecorder(); + + VideoRenderer videoRenderer(); +} diff --git a/lib/src/interface/media_recorder.dart b/lib/src/interface/media_recorder.dart new file mode 100644 index 0000000000..67be812d7a --- /dev/null +++ b/lib/src/interface/media_recorder.dart @@ -0,0 +1,22 @@ +import 'enums.dart'; +import 'media_stream.dart'; +import 'media_stream_track.dart'; + +abstract class MediaRecorder { + /// For Android use audioChannel param + /// For iOS use audioTrack + Future start( + String path, { + MediaStreamTrack videoTrack, + RecorderAudioChannel audioChannel, + }); + + /// Only for Flutter Web + void startWeb( + MediaStream stream, { + Function(dynamic blob, bool isLastOne) onDataChunk, + String mimeType, + }); + + Future stop(); +} diff --git a/lib/src/interface/media_stream.dart b/lib/src/interface/media_stream.dart new file mode 100644 index 0000000000..ecbaba708a --- /dev/null +++ b/lib/src/interface/media_stream.dart @@ -0,0 +1,34 @@ +import 'media_stream_track.dart'; + +typedef MediaTrackCallback = void Function(MediaStreamTrack track); + +abstract class MediaStream { + MediaStream(this._id, this._ownerTag); + final String _id; + final String _ownerTag; + + MediaTrackCallback onAddTrack; + + MediaTrackCallback onRemoveTrack; + + String get id => _id; + + String get ownerTag => _ownerTag; + + Future getMediaTracks(); + + Future addTrack(MediaStreamTrack track, {bool addToNative = true}); + + Future removeTrack(MediaStreamTrack track, + {bool removeFromNative = true}); + + List getTracks(); + + List getAudioTracks(); + + List getVideoTracks(); + + Future dispose() async { + return Future.value(); + } +} diff --git a/lib/src/interface/media_stream_track.dart 
b/lib/src/interface/media_stream_track.dart new file mode 100644 index 0000000000..17ed0076d1 --- /dev/null +++ b/lib/src/interface/media_stream_track.dart @@ -0,0 +1,39 @@ +typedef StreamTrackCallback = Function(); + +abstract class MediaStreamTrack { + MediaStreamTrack(); + + StreamTrackCallback onEnded; + + StreamTrackCallback onMute; + + bool get enabled; + set enabled(bool b); + + String get label; + + String get kind; + + String get id; + + ///Future contains isFrontCamera + ///Throws error if switching camera failed + Future switchCamera(); + + Future adaptRes(int width, int height); + + void setVolume(double volume); + + void setMicrophoneMute(bool mute); + + void enableSpeakerphone(bool enable); + + Future captureFrame([String filePath]); + + Future hasTorch(); + Future setTorch(bool torch); + + Future dispose() { + return Future.value(); + } +} diff --git a/lib/src/interface/mediadevices.dart b/lib/src/interface/mediadevices.dart new file mode 100644 index 0000000000..b32fdb7f4f --- /dev/null +++ b/lib/src/interface/mediadevices.dart @@ -0,0 +1,7 @@ +import 'media_stream.dart'; + +abstract class MediaDevices { + Future getUserMedia(Map mediaConstraints); + Future getDisplayMedia(Map mediaConstraints); + Future> getSources(); +} diff --git a/lib/src/interface/navigator.dart b/lib/src/interface/navigator.dart new file mode 100644 index 0000000000..aa5c48f9d6 --- /dev/null +++ b/lib/src/interface/navigator.dart @@ -0,0 +1,9 @@ +import 'media_stream.dart'; +import 'mediadevices.dart'; + +abstract class Navigator { + Future getUserMedia(Map mediaConstraints); + Future getDisplayMedia(Map mediaConstraints); + Future> getSources(); + MediaDevices get mediaDevices; +} diff --git a/lib/src/web/rtc_data_channel.dart b/lib/src/interface/rtc_data_channel.dart similarity index 52% rename from lib/src/web/rtc_data_channel.dart rename to lib/src/interface/rtc_data_channel.dart index b8e8cedda2..91c80b49da 100644 --- a/lib/src/web/rtc_data_channel.dart +++ b/lib/src/interface/rtc_data_channel.dart @@ -1,9 +1,7 @@ import 'dart:async'; -import 'dart:html' as html; -import 'dart:js_util' as jsutil; import 'dart:typed_data'; -import '../enums.dart'; +import 'enums.dart'; class RTCDataChannelInit { bool ordered = true; @@ -22,7 +20,7 @@ class RTCDataChannelInit { if (maxRetransmits > 0) 'maxRetransmits': maxRetransmits, 'protocol': protocol, 'negotiated': negotiated, - if (id != 0) 'id': id + 'id': id }; } } @@ -63,48 +61,18 @@ class RTCDataChannelMessage { } typedef RTCDataChannelStateCallback = void Function(RTCDataChannelState state); + typedef RTCDataChannelOnMessageCallback = void Function( RTCDataChannelMessage data); -class RTCDataChannel { - RTCDataChannel(this._jsDc) { - stateChangeStream = _stateChangeController.stream; - messageStream = _messageController.stream; - _jsDc.onClose.listen((_) { - _state = RTCDataChannelState.RTCDataChannelClosed; - _stateChangeController.add(_state); - if (onDataChannelState != null) { - onDataChannelState(_state); - } - }); - _jsDc.onOpen.listen((_) { - _state = RTCDataChannelState.RTCDataChannelOpen; - _stateChangeController.add(_state); - if (onDataChannelState != null) { - onDataChannelState(_state); - } - }); - _jsDc.onMessage.listen((event) async { - var msg = await _parse(event.data); - _messageController.add(msg); - if (onMessage != null) { - onMessage(msg); - } - }); - } +abstract class RTCDataChannel { + RTCDataChannel(); - final html.RtcDataChannel _jsDc; RTCDataChannelStateCallback onDataChannelState; RTCDataChannelOnMessageCallback onMessage; - 
RTCDataChannelState _state = RTCDataChannelState.RTCDataChannelConnecting; /// Get current state. - RTCDataChannelState get state => _state; - - final _stateChangeController = - StreamController.broadcast(sync: true); - final _messageController = - StreamController.broadcast(sync: true); + RTCDataChannelState get state; /// Stream of state change events. Emits the new state on change. /// Closes when the [RTCDataChannel] is closed. @@ -114,32 +82,12 @@ class RTCDataChannel { /// Closes when the [RTCDataChannel] is closed. Stream messageStream; - Future _parse(dynamic data) async { - if (data is String) return RTCDataChannelMessage(data); - dynamic arrayBuffer; - if (data is html.Blob) { - // This should never happen actually - arrayBuffer = await jsutil - .promiseToFuture(jsutil.callMethod(data, 'arrayBuffer', [])); - } else { - arrayBuffer = data; - } - return RTCDataChannelMessage.fromBinary(arrayBuffer.asUint8List()); - } + /// Send a message to this datachannel. + /// To send a text message, use the default constructor to instantiate a text [RTCDataChannelMessage] + /// for the [message] parameter. + /// To send a binary message, pass a binary [RTCDataChannelMessage] + /// constructed with [RTCDataChannelMessage.fromBinary] + Future send(RTCDataChannelMessage message); - Future send(RTCDataChannelMessage message) { - if (!message.isBinary) { - _jsDc.send(message.text); - } else { - // This may just work - _jsDc.sendByteBuffer(message.binary.buffer); - // If not, convert to ArrayBuffer/Blob - } - return Future.value(); - } - - Future close() { - _jsDc.close(); - return Future.value(); - } + Future close(); } diff --git a/lib/src/web/rtc_dtmf_sender.dart b/lib/src/interface/rtc_dtmf_sender.dart similarity index 77% rename from lib/src/web/rtc_dtmf_sender.dart rename to lib/src/interface/rtc_dtmf_sender.dart index 191a848102..d417acbdb7 100644 --- a/lib/src/web/rtc_dtmf_sender.dart +++ b/lib/src/interface/rtc_dtmf_sender.dart @@ -1,9 +1,4 @@ -import 'dart:html' as html; - -class RTCDTMFSender { - RTCDTMFSender(this._jsDtmfSender); - final html.RtcDtmfSender _jsDtmfSender; - +abstract class RTCDTMFSender { /// tones:A String containing the DTMF codes to be transmitted to the recipient. /// Specifying an empty string as the tones parameter clears the tone /// buffer, aborting any currently queued tones. A "," character inserts @@ -15,11 +10,10 @@ class RTCDTMFSender { /// if you specify a lower value, 30 ms will be used instead); /// the default is 70 ms. 
Future insertDTMF(String tones, - {int duration = 100, int interToneGap = 70}) async { - return _jsDtmfSender.insertDtmf(tones, duration, interToneGap); - } + {int duration = 100, int interToneGap = 70}); /// Compatible with old methods + @Deprecated('Use method insertDTMF instead') Future sendDtmf(String tones, {int duration = 100, int interToneGap = 70}) => insertDTMF(tones, duration: duration, interToneGap: interToneGap); diff --git a/lib/src/rtc_ice_candidate.dart b/lib/src/interface/rtc_ice_candidate.dart similarity index 65% rename from lib/src/rtc_ice_candidate.dart rename to lib/src/interface/rtc_ice_candidate.dart index 7357833c26..d9ba2d9606 100644 --- a/lib/src/rtc_ice_candidate.dart +++ b/lib/src/interface/rtc_ice_candidate.dart @@ -1,9 +1,5 @@ class RTCIceCandidate { RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); - factory RTCIceCandidate.fromMap(Map map) { - return RTCIceCandidate( - map['candidate'], map['sdpMid'], map['sdpMLineIndex']); - } final String candidate; final String sdpMid; final int sdpMlineIndex; diff --git a/lib/src/interface/rtc_peerconnection.dart b/lib/src/interface/rtc_peerconnection.dart new file mode 100644 index 0000000000..822439ccc1 --- /dev/null +++ b/lib/src/interface/rtc_peerconnection.dart @@ -0,0 +1,115 @@ +import 'enums.dart'; +import 'media_stream.dart'; +import 'media_stream_track.dart'; +import 'rtc_data_channel.dart'; +import 'rtc_dtmf_sender.dart'; +import 'rtc_ice_candidate.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; +import 'rtc_session_description.dart'; +import 'rtc_stats_report.dart'; +import 'rtc_track_event.dart'; +import 'rtc_rtp_transceiver.dart'; + +typedef SignalingStateCallback = void Function(RTCSignalingState state); +typedef PeerConnectionStateCallback = void Function( + RTCPeerConnectionState state); +typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); +typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); +typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); +typedef AddStreamCallback = void Function(MediaStream stream); +typedef RemoveStreamCallback = void Function(MediaStream stream); +typedef AddTrackCallback = void Function( + MediaStream stream, MediaStreamTrack track); +typedef RemoveTrackCallback = void Function( + MediaStream stream, MediaStreamTrack track); +typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); +typedef RenegotiationNeededCallback = void Function(); + +/// Unified-Plan +typedef UnifiedPlanTrackCallback = void Function(RTCTrackEvent event); + +abstract class RTCPeerConnection { + RTCPeerConnection(); + + // public: delegate + SignalingStateCallback onSignalingState; + PeerConnectionStateCallback onConnectionState; + IceGatheringStateCallback onIceGatheringState; + IceConnectionStateCallback onIceConnectionState; + IceCandidateCallback onIceCandidate; + AddStreamCallback onAddStream; + RemoveStreamCallback onRemoveStream; + AddTrackCallback onAddTrack; + RemoveTrackCallback onRemoveTrack; + RTCDataChannelCallback onDataChannel; + RenegotiationNeededCallback onRenegotiationNeeded; + + /// Unified-Plan + UnifiedPlanTrackCallback onTrack; + + RTCSignalingState get signalingState; + + RTCIceGatheringState get iceGatheringState; + + RTCIceConnectionState get iceConnectionState; + + RTCPeerConnectionState get connectionState; + + Future dispose(); + + Map get getConfiguration; + + Future setConfiguration(Map configuration); + + Future createOffer(Map constraints); + + 
Future createAnswer(Map constraints); + + Future addStream(MediaStream stream); + + Future removeStream(MediaStream stream); + + Future getLocalDescription(); + Future setLocalDescription(RTCSessionDescription description); + + Future getRemoteDescription(); + Future setRemoteDescription(RTCSessionDescription description); + + Future addCandidate(RTCIceCandidate candidate); + + Future> getStats([MediaStreamTrack track]); + + List getLocalStreams(); + + List getRemoteStreams(); + + Future createDataChannel( + String label, RTCDataChannelInit dataChannelDict); + + Future close(); + + RTCDTMFSender createDtmfSender(MediaStreamTrack track); + + /// Unified-Plan. + List get senders; + + List get receivers; + + List get transceivers; + + Future createSender(String kind, String streamId); + + Future addTrack(MediaStreamTrack track, + [List streams]); + + Future removeTrack(RTCRtpSender sender); + + Future closeSender(RTCRtpSender sender); + + /// 'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } + Future addTransceiver( + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}); +} diff --git a/lib/src/rtc_rtcp_parameters.dart b/lib/src/interface/rtc_rtcp_parameters.dart similarity index 100% rename from lib/src/rtc_rtcp_parameters.dart rename to lib/src/interface/rtc_rtcp_parameters.dart diff --git a/lib/src/rtc_rtp_parameters.dart b/lib/src/interface/rtc_rtp_parameters.dart similarity index 100% rename from lib/src/rtc_rtp_parameters.dart rename to lib/src/interface/rtc_rtp_parameters.dart diff --git a/lib/src/interface/rtc_rtp_receiver.dart b/lib/src/interface/rtc_rtp_receiver.dart new file mode 100644 index 0000000000..b654a813de --- /dev/null +++ b/lib/src/interface/rtc_rtp_receiver.dart @@ -0,0 +1,26 @@ +import 'dart:async'; + +import 'enums.dart'; +import 'media_stream_track.dart'; +import 'rtc_rtp_parameters.dart'; + +typedef OnFirstPacketReceivedCallback = void Function( + RTCRtpReceiver rtpReceiver, RTCRtpMediaType mediaType); + +abstract class RTCRtpReceiver { + RTCRtpReceiver(); + + /// public: + OnFirstPacketReceivedCallback onFirstPacketReceived; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
+ RTCRtpParameters get parameters; + + MediaStreamTrack get track; + + String get receiverId; + + Future dispose(); +} diff --git a/lib/src/interface/rtc_rtp_sender.dart b/lib/src/interface/rtc_rtp_sender.dart new file mode 100644 index 0000000000..02af683f15 --- /dev/null +++ b/lib/src/interface/rtc_rtp_sender.dart @@ -0,0 +1,30 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; + +import 'media_stream_track.dart'; +import 'rtc_dtmf_sender.dart'; +import 'rtc_rtp_parameters.dart'; + +abstract class RTCRtpSender { + RTCRtpSender(); + + Future setParameters(RTCRtpParameters parameters); + + Future replaceTrack(MediaStreamTrack track); + + Future setTrack(MediaStreamTrack track, {bool takeOwnership = true}); + + RTCRtpParameters get parameters; + + MediaStreamTrack get track; + + String get senderId; + + bool get ownsTrack; + + RTCDTMFSender get dtmfSender; + + @mustCallSuper + Future dispose(); +} diff --git a/lib/src/interface/rtc_rtp_transceiver.dart b/lib/src/interface/rtc_rtp_transceiver.dart new file mode 100644 index 0000000000..5f8ed150be --- /dev/null +++ b/lib/src/interface/rtc_rtp_transceiver.dart @@ -0,0 +1,44 @@ +import 'dart:async'; + +import 'enums.dart'; +import 'media_stream.dart'; +import 'rtc_rtp_parameters.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_sender.dart'; + +List listToRtpEncodings(List> list) { + return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); +} + +class RTCRtpTransceiverInit { + RTCRtpTransceiverInit({ + this.direction, + this.streams, + this.sendEncodings, + }); + TransceiverDirection direction; + List streams; + List sendEncodings; +} + +abstract class RTCRtpTransceiver { + RTCRtpTransceiver(); + + TransceiverDirection get currentDirection; + + String get mid; + + RTCRtpSender get sender; + + RTCRtpReceiver get receiver; + + bool get stoped; + + String get transceiverId; + + Future setDirection(TransceiverDirection direction); + + Future getCurrentDirection(); + + Future stop(); +} diff --git a/lib/src/rtc_session_description.dart b/lib/src/interface/rtc_session_description.dart similarity index 100% rename from lib/src/rtc_session_description.dart rename to lib/src/interface/rtc_session_description.dart diff --git a/lib/src/interface/rtc_stats_report.dart b/lib/src/interface/rtc_stats_report.dart new file mode 100644 index 0000000000..f25baf694d --- /dev/null +++ b/lib/src/interface/rtc_stats_report.dart @@ -0,0 +1,7 @@ +class StatsReport { + StatsReport(this.id, this.type, this.timestamp, this.values); + final String id; + final String type; + final double timestamp; + final Map values; +} diff --git a/lib/src/interface/rtc_track_event.dart b/lib/src/interface/rtc_track_event.dart new file mode 100644 index 0000000000..2f27f34c1c --- /dev/null +++ b/lib/src/interface/rtc_track_event.dart @@ -0,0 +1,12 @@ +import 'media_stream.dart'; +import 'media_stream_track.dart'; +import 'rtc_rtp_receiver.dart'; +import 'rtc_rtp_transceiver.dart'; + +class RTCTrackEvent { + RTCTrackEvent({this.receiver, this.streams, this.track, this.transceiver}); + final RTCRtpReceiver receiver; + final List streams; + final MediaStreamTrack track; + final RTCRtpTransceiver transceiver; +} diff --git a/lib/src/interface/rtc_video_renderer.dart b/lib/src/interface/rtc_video_renderer.dart new file mode 100644 index 0000000000..e081f82f76 --- /dev/null +++ b/lib/src/interface/rtc_video_renderer.dart @@ -0,0 +1,66 @@ +import 'package:flutter/foundation.dart'; +import 'media_stream.dart'; + +@immutable +class RTCVideoValue { + const RTCVideoValue({ 
+ this.width = 0.0, + this.height = 0.0, + this.rotation = 0, + this.renderVideo = false, + }); + static const RTCVideoValue empty = RTCVideoValue(); + final double width; + final double height; + final int rotation; + final bool renderVideo; + double get aspectRatio { + if (width == 0.0 || height == 0.0) { + return 1.0; + } + return (rotation == 90 || rotation == 270) + ? height / width + : width / height; + } + + RTCVideoValue copyWith({ + double width, + double height, + int rotation, + bool renderVideo, + }) { + return RTCVideoValue( + width: width ?? this.width, + height: height ?? this.height, + rotation: rotation ?? this.rotation, + renderVideo: (this.width != 0 && this.height != 0 && renderVideo) ?? + this.renderVideo, + ); + } + + @override + String toString() => + '$runtimeType(width: $width, height: $height, rotation: $rotation)'; +} + +abstract class VideoRenderer extends ValueNotifier { + VideoRenderer() : super(RTCVideoValue.empty); + + bool get muted; + set muted(bool mute); + + bool get renderVideo; + int get textureId; + + Future initialize(); + + MediaStream get srcObject; + set srcObject(MediaStream stream); + + @override + @mustCallSuper + Future dispose() async { + super.dispose(); + return Future.value(); + } +} diff --git a/lib/src/media_devices.dart b/lib/src/media_devices.dart new file mode 100644 index 0000000000..828551c788 --- /dev/null +++ b/lib/src/media_devices.dart @@ -0,0 +1,23 @@ +import '../flutter_webrtc.dart'; + +class MediaDevices { + @Deprecated( + 'Use the navigator.mediaDevices.getUserMedia(Map) provide from the facrory instead') + static Future getUserMedia( + Map mediaConstraints) async { + return navigator.mediaDevices.getUserMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getDisplayMedia(Map) provide from the facrory instead') + static Future getDisplayMedia( + Map mediaConstraints) async { + return navigator.mediaDevices.getDisplayMedia(mediaConstraints); + } + + @Deprecated( + 'Use the navigator.mediaDevices.getSources() provide from the facrory instead') + static Future> getSources() { + return navigator.mediaDevices.getSources(); + } +} diff --git a/lib/src/media_recorder.dart b/lib/src/media_recorder.dart index 4fe8355f2f..8d951e64d7 100644 --- a/lib/src/media_recorder.dart +++ b/lib/src/media_recorder.dart @@ -1,41 +1,26 @@ -import 'dart:async'; -import 'dart:math'; +import '../flutter_webrtc.dart'; +import 'interface/enums.dart'; +import 'interface/media_recorder.dart' as _interface; +import 'interface/media_stream.dart'; +import 'interface/media_stream_track.dart'; -import 'enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'utils.dart'; - -class MediaRecorder { - static final _random = Random(); - final _recorderId = _random.nextInt(0x7FFFFFFF); +class MediaRecorder extends _interface.MediaRecorder { + MediaRecorder() : _delegate = mediaRecorder(); + final MediaRecorder _delegate; + @override Future start(String path, - {MediaStreamTrack videoTrack, RecorderAudioChannel audioChannel - // TODO(cloudwebrtc): add codec/quality options - }) async { - if (path == null) { - throw ArgumentError.notNull('path'); - } - - if (audioChannel == null && videoTrack == null) { - throw Exception('Neither audio nor video track were provided'); - } - - await WebRTC.methodChannel().invokeMethod('startRecordToFile', { - 'path': path, - 'audioChannel': audioChannel?.index, - 'videoTrackId': videoTrack?.id, - 'recorderId': _recorderId - }); - } + {MediaStreamTrack videoTrack, 
RecorderAudioChannel audioChannel}) => + _delegate.start(path, videoTrack: videoTrack, audioChannel: audioChannel); - void startWeb(MediaStream stream, - {Function(dynamic blob, bool isLastOne) onDataChunk, - String mimeType = 'video/mp4;codecs=h264'}) { - throw 'It\'s for Flutter Web only'; - } + @override + Future stop() => _delegate.stop(); - Future stop() async => await WebRTC.methodChannel() - .invokeMethod('stopRecordToFile', {'recorderId': _recorderId}); + @override + void startWeb( + MediaStream stream, { + Function(dynamic blob, bool isLastOne) onDataChunk, + String mimeType, + }) => + _delegate.startWeb(stream, onDataChunk: onDataChunk, mimeType: mimeType); } diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart new file mode 100644 index 0000000000..5d9569afa4 --- /dev/null +++ b/lib/src/native/factory_impl.dart @@ -0,0 +1,68 @@ +import 'dart:async'; + +import '../interface/factory.dart'; +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/navigator.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'media_recorder_impl.dart'; +import 'media_stream_impl.dart'; +import 'navigator_impl.dart'; +import 'rtc_peerconnection_impl.dart'; +import 'rtc_video_renderer_impl.dart'; +import 'utils.dart'; + +class RTCFactoryNative extends RTCFactory { + RTCFactoryNative._internal(); + + static final RTCFactory instance = RTCFactoryNative._internal(); + + @override + Future createLocalMediaStream(String label) async { + var _channel = WebRTC.methodChannel(); + + final response = await _channel + .invokeMethod>('createLocalMediaStream'); + + return MediaStreamNative(response['streamId'], label); + } + + @override + Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + var channel = WebRTC.methodChannel(); + + var defaultConstraints = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': true}, + ], + }; + + final response = await channel.invokeMethod>( + 'createPeerConnection', + { + 'configuration': configuration, + 'constraints': constraints.isEmpty ? 
defaultConstraints : constraints + }, + ); + + String peerConnectionId = response['peerConnectionId']; + return RTCPeerConnectionNative(peerConnectionId, configuration); + } + + @override + MediaRecorder mediaRecorder() { + return MediaRecorderNative(); + } + + @override + VideoRenderer videoRenderer() { + return RTCVideoRendererNative(); + } + + @override + Navigator get navigator => NavigatorNative(); +} diff --git a/lib/src/native/media_recorder_impl.dart b/lib/src/native/media_recorder_impl.dart new file mode 100644 index 0000000000..f52fbf8cf5 --- /dev/null +++ b/lib/src/native/media_recorder_impl.dart @@ -0,0 +1,44 @@ +import 'dart:async'; +import 'dart:math'; + +import '../interface/enums.dart'; +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import 'utils.dart'; + +class MediaRecorderNative extends MediaRecorder { + static final _random = Random(); + final _recorderId = _random.nextInt(0x7FFFFFFF); + + @override + Future start(String path, + {MediaStreamTrack videoTrack, RecorderAudioChannel audioChannel + // TODO(cloudwebrtc): add codec/quality options + }) async { + if (path == null) { + throw ArgumentError.notNull('path'); + } + + if (audioChannel == null && videoTrack == null) { + throw Exception('Neither audio nor video track were provided'); + } + + await WebRTC.methodChannel().invokeMethod('startRecordToFile', { + 'path': path, + 'audioChannel': audioChannel?.index, + 'videoTrackId': videoTrack?.id, + 'recorderId': _recorderId + }); + } + + @override + void startWeb(MediaStream stream, + {Function(dynamic blob, bool isLastOne) onDataChunk, String mimeType}) { + throw 'It\'s for Flutter Web only'; + } + + @override + Future stop() async => await WebRTC.methodChannel() + .invokeMethod('stopRecordToFile', {'recorderId': _recorderId}); +} diff --git a/lib/src/media_stream.dart b/lib/src/native/media_stream_impl.dart similarity index 64% rename from lib/src/media_stream.dart rename to lib/src/native/media_stream_impl.dart index ebf1eda962..968dd1e549 100644 --- a/lib/src/media_stream.dart +++ b/lib/src/native/media_stream_impl.dart @@ -1,47 +1,54 @@ import 'dart:async'; -import 'media_stream_track.dart'; +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import 'media_stream_track_impl.dart'; import 'utils.dart'; -typedef MediaTrackCallback = void Function(MediaStreamTrack track); +class MediaStreamNative extends MediaStream { + MediaStreamNative(String streamId, String ownerTag) + : super(streamId, ownerTag); -class MediaStream { - MediaStream(this._streamId, this._ownerTag); - factory MediaStream.fromMap(Map map) { - return MediaStream(map['streamId'], map['ownerTag']) + factory MediaStreamNative.fromMap(Map map) { + return MediaStreamNative(map['streamId'], map['ownerTag']) ..setMediaTracks(map['audioTracks'], map['videoTracks']); } + final _channel = WebRTC.methodChannel(); - final String _streamId; - final String _ownerTag; + final _audioTracks = []; final _videoTracks = []; - String get ownerTag => _ownerTag; - String get id => _streamId; - MediaTrackCallback onAddTrack; - MediaTrackCallback onRemoveTrack; void setMediaTracks(List audioTracks, List videoTracks) { _audioTracks.clear(); audioTracks.forEach((track) { - _audioTracks.add(MediaStreamTrack.fromMap(track)); + _audioTracks.add(MediaStreamTrackNative( + track['id'], track['label'], track['kind'], track['enabled'])); }); _videoTracks.clear(); videoTracks.forEach((track) { - 
_videoTracks.add(MediaStreamTrack.fromMap(track)); + _videoTracks.add(MediaStreamTrackNative( + track['id'], track['label'], track['kind'], track['enabled'])); }); } + @override + List getTracks() { + return [..._audioTracks, ..._videoTracks]; + } + + @override Future getMediaTracks() async { final response = await _channel.invokeMethod>( 'mediaStreamGetTracks', - {'streamId': _streamId}, + {'streamId': id}, ); setMediaTracks(response['audioTracks'], response['videoTracks']); } + @override Future addTrack(MediaStreamTrack track, {bool addToNative = true}) async { if (track.kind == 'audio') { @@ -52,10 +59,11 @@ class MediaStream { if (addToNative) { await _channel.invokeMethod('mediaStreamAddTrack', - {'streamId': _streamId, 'trackId': track.id}); + {'streamId': id, 'trackId': track.id}); } } + @override Future removeTrack(MediaStreamTrack track, {bool removeFromNative = true}) async { if (track.kind == 'audio') { @@ -66,26 +74,25 @@ class MediaStream { if (removeFromNative) { await _channel.invokeMethod('mediaStreamRemoveTrack', - {'streamId': _streamId, 'trackId': track.id}); + {'streamId': id, 'trackId': track.id}); } } - List getTracks() { - return [..._audioTracks, ..._videoTracks]; - } - + @override List getAudioTracks() { return _audioTracks; } + @override List getVideoTracks() { return _videoTracks; } + @override Future dispose() async { await _channel.invokeMethod( 'streamDispose', - {'streamId': _streamId}, + {'streamId': id}, ); } } diff --git a/lib/src/media_stream_track.dart b/lib/src/native/media_stream_track_impl.dart similarity index 78% rename from lib/src/media_stream_track.dart rename to lib/src/native/media_stream_track_impl.dart index 6206e7f97a..5f3d08012e 100644 --- a/lib/src/media_stream_track.dart +++ b/lib/src/native/media_stream_track_impl.dart @@ -1,54 +1,58 @@ import 'dart:async'; -import 'utils.dart'; -typedef StreamTrackCallback = Function(); +import '../interface/media_stream_track.dart'; +import 'utils.dart'; -class MediaStreamTrack { - MediaStreamTrack(this._trackId, this._label, this._kind, this._enabled); - factory MediaStreamTrack.fromMap(Map map) { - return MediaStreamTrack( +class MediaStreamTrackNative extends MediaStreamTrack { + MediaStreamTrackNative(this._trackId, this._label, this._kind, this._enabled); + factory MediaStreamTrackNative.fromMap(Map map) { + return MediaStreamTrackNative( map['id'], map['label'], map['kind'], map['enabled']); } - final _channel = WebRTC.methodChannel(); final String _trackId; final String _label; final String _kind; bool _enabled; - StreamTrackCallback onended; - StreamTrackCallback onmute; + @override set enabled(bool enabled) { _channel.invokeMethod('mediaStreamTrackSetEnable', {'trackId': _trackId, 'enabled': enabled}); _enabled = enabled; } + @override bool get enabled => _enabled; + @override String get label => _label; + @override String get kind => _kind; + @override String get id => _trackId; + @override Future hasTorch() => _channel.invokeMethod( 'mediaStreamTrackHasTorch', {'trackId': _trackId}, ); + @override Future setTorch(bool torch) => _channel.invokeMethod( 'mediaStreamTrackSetTorch', {'trackId': _trackId, 'torch': torch}, ); - ///Future contains isFrontCamera - ///Throws error if switching camera failed + @override Future switchCamera() => _channel.invokeMethod( 'mediaStreamTrackSwitchCamera', {'trackId': _trackId}, ); + @override void setVolume(double volume) async { await _channel.invokeMethod( 'setVolume', @@ -56,6 +60,7 @@ class MediaStreamTrack { ); } + @override void setMicrophoneMute(bool 
mute) async { print('MediaStreamTrack:setMicrophoneMute $mute'); await _channel.invokeMethod( @@ -64,6 +69,7 @@ class MediaStreamTrack { ); } + @override void enableSpeakerphone(bool enable) async { print('MediaStreamTrack:enableSpeakerphone $enable'); await _channel.invokeMethod( @@ -72,7 +78,7 @@ class MediaStreamTrack { ); } - /// On Flutter Web returns Future which contains data url on success + @override Future captureFrame([String filePath]) { return _channel.invokeMethod( 'captureFrame', @@ -80,10 +86,16 @@ class MediaStreamTrack { ); } + @override Future dispose() async { await _channel.invokeMethod( 'trackDispose', {'trackId': _trackId}, ); } + + @override + Future adaptRes(int width, int height) { + throw UnimplementedError(); + } } diff --git a/lib/src/get_user_media.dart b/lib/src/native/mediadevices_impl.dart similarity index 77% rename from lib/src/get_user_media.dart rename to lib/src/native/mediadevices_impl.dart index 2c387a3950..f36e717aa0 100644 --- a/lib/src/get_user_media.dart +++ b/lib/src/native/mediadevices_impl.dart @@ -1,10 +1,15 @@ import 'dart:async'; + import 'package:flutter/services.dart'; -import 'media_stream.dart'; + +import '../interface/media_stream.dart'; +import '../interface/mediadevices.dart'; +import 'media_stream_impl.dart'; import 'utils.dart'; -class MediaDevices { - static Future getUserMedia( +class MediaDeviceNative extends MediaDevices { + @override + Future getUserMedia( Map mediaConstraints) async { var channel = WebRTC.methodChannel(); try { @@ -13,7 +18,7 @@ class MediaDevices { {'constraints': mediaConstraints}, ); String streamId = response['streamId']; - var stream = MediaStream(streamId, 'local'); + var stream = MediaStreamNative(streamId, 'local'); stream.setMediaTracks(response['audioTracks'], response['videoTracks']); return stream; } on PlatformException catch (e) { @@ -21,11 +26,8 @@ class MediaDevices { } } -/* Implement screen sharing, - * use MediaProjection for Android and use ReplayKit for iOS - * TODO(cloudwebrtc): implement for native layer. 
- * */ - static Future getDisplayMedia( + @override + Future getDisplayMedia( Map mediaConstraints) async { var channel = WebRTC.methodChannel(); try { @@ -34,7 +36,7 @@ class MediaDevices { {'constraints': mediaConstraints}, ); String streamId = response['streamId']; - var stream = MediaStream(streamId, 'local'); + var stream = MediaStreamNative(streamId, 'local'); stream.setMediaTracks(response['audioTracks'], response['videoTracks']); return stream; } on PlatformException catch (e) { @@ -42,7 +44,8 @@ class MediaDevices { } } - static Future> getSources() async { + @override + Future> getSources() async { var channel = WebRTC.methodChannel(); try { final response = await channel.invokeMethod>( diff --git a/lib/src/native/navigator_impl.dart b/lib/src/native/navigator_impl.dart new file mode 100644 index 0000000000..882efe26f4 --- /dev/null +++ b/lib/src/native/navigator_impl.dart @@ -0,0 +1,24 @@ +import '../interface/media_stream.dart'; +import '../interface/mediadevices.dart'; +import '../interface/navigator.dart'; +import 'mediadevices_impl.dart'; + +class NavigatorNative extends Navigator { + @override + Future getDisplayMedia(Map mediaConstraints) { + return mediaDevices.getDisplayMedia(mediaConstraints); + } + + @override + Future getSources() { + return mediaDevices.getSources(); + } + + @override + Future getUserMedia(Map mediaConstraints) { + return mediaDevices.getUserMedia(mediaConstraints); + } + + @override + MediaDevices get mediaDevices => MediaDeviceNative(); +} diff --git a/lib/src/native/rtc_data_channel_impl.dart b/lib/src/native/rtc_data_channel_impl.dart new file mode 100644 index 0000000000..b54fadf4eb --- /dev/null +++ b/lib/src/native/rtc_data_channel_impl.dart @@ -0,0 +1,103 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import '../interface/enums.dart'; +import '../interface/rtc_data_channel.dart'; +import 'utils.dart'; + +final _typeStringToMessageType = { + 'text': MessageType.text, + 'binary': MessageType.binary +}; + +/// A class that represents a WebRTC datachannel. +/// Can send and receive text and binary messages. +class RTCDataChannelNative extends RTCDataChannel { + RTCDataChannelNative( + this._peerConnectionId, this._label, this._dataChannelId) { + stateChangeStream = _stateChangeController.stream; + messageStream = _messageController.stream; + _eventSubscription = _eventChannelFor(_peerConnectionId, _dataChannelId) + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + final String _peerConnectionId; + final String _label; + final int _dataChannelId; + RTCDataChannelState _state; + final _channel = WebRTC.methodChannel(); + StreamSubscription _eventSubscription; + + @override + RTCDataChannelState get state => _state; + + /// Get label. + String get label => _label; + + final _stateChangeController = + StreamController.broadcast(sync: true); + final _messageController = + StreamController.broadcast(sync: true); + + /// RTCDataChannel event listener. 
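+ /// Expects maps from the platform event channel carrying an 'event' key of
+ /// either 'dataChannelStateChanged' (with a 'state' string) or
+ /// 'dataChannelReceiveMessage' (with 'type' and 'data'), and forwards them to
+ /// [onDataChannelState]/[onMessage] and the broadcast streams above.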
+ void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'dataChannelStateChanged': + //int dataChannelId = map['id']; + _state = rtcDataChannelStateForString(map['state']); + onDataChannelState?.call(_state); + + _stateChangeController.add(_state); + break; + case 'dataChannelReceiveMessage': + //int dataChannelId = map['id']; + + var type = _typeStringToMessageType[map['type']]; + dynamic data = map['data']; + RTCDataChannelMessage message; + if (type == MessageType.binary) { + message = RTCDataChannelMessage.fromBinary(data); + } else { + message = RTCDataChannelMessage(data); + } + + onMessage?.call(message); + + _messageController.add(message); + break; + } + } + + EventChannel _eventChannelFor(String peerConnectionId, int dataChannelId) { + return EventChannel( + 'FlutterWebRTC/dataChannelEvent$peerConnectionId$dataChannelId'); + } + + void errorListener(Object obj) { + final PlatformException e = obj; + throw e; + } + + @override + Future send(RTCDataChannelMessage message) async { + await _channel.invokeMethod('dataChannelSend', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _dataChannelId, + 'type': message.isBinary ? 'binary' : 'text', + 'data': message.isBinary ? message.binary : message.text, + }); + } + + @override + Future close() async { + await _stateChangeController.close(); + await _messageController.close(); + await _eventSubscription?.cancel(); + await _channel.invokeMethod('dataChannelClose', { + 'peerConnectionId': _peerConnectionId, + 'dataChannelId': _dataChannelId + }); + } +} diff --git a/lib/src/native/rtc_dtmf_sender_impl.dart b/lib/src/native/rtc_dtmf_sender_impl.dart new file mode 100644 index 0000000000..7b92d0088d --- /dev/null +++ b/lib/src/native/rtc_dtmf_sender_impl.dart @@ -0,0 +1,22 @@ +import '../interface/rtc_dtmf_sender.dart'; +import 'utils.dart'; + +class RTCDTMFSenderNative extends RTCDTMFSender { + RTCDTMFSenderNative(this._peerConnectionId, this._rtpSenderId); + // peer connection Id must be defined as a variable where this function will be called. 
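+ // Instances are obtained via RTCPeerConnectionNative.createDtmfSender() or
+ // RTCRtpSenderNative.dtmfSender; insertDTMF() below forwards the tones to the
+ // platform side through the 'sendDtmf' method channel call.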
+ final String _peerConnectionId; + final String _rtpSenderId; + final _channel = WebRTC.methodChannel(); + + @override + Future insertDTMF(String tones, + {int duration = 100, int interToneGap = 70}) async { + await _channel.invokeMethod('sendDtmf', { + 'peerConnectionId': _peerConnectionId, + 'rtpSenderId': _rtpSenderId, + 'tone': tones, + 'duration': duration, + 'gap': interToneGap, + }); + } +} diff --git a/lib/src/native/rtc_peerconnection_factory.dart b/lib/src/native/rtc_peerconnection_factory.dart new file mode 100644 index 0000000000..3ebe369809 --- /dev/null +++ b/lib/src/native/rtc_peerconnection_factory.dart @@ -0,0 +1,29 @@ +import 'dart:async'; + +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/navigator.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'factory_impl.dart'; + +Future createPeerConnection( + Map configuration, + [Map constraints = const {}]) async { + return RTCFactoryNative.instance + .createPeerConnection(configuration, constraints); +} + +Future createLocalMediaStream(String label) async { + return RTCFactoryNative.instance.createLocalMediaStream(label); +} + +MediaRecorder mediaRecorder() { + return RTCFactoryNative.instance.mediaRecorder(); +} + +VideoRenderer videoRenderer() { + return RTCFactoryNative.instance.videoRenderer(); +} + +Navigator get navigator => RTCFactoryNative.instance.navigator; diff --git a/lib/src/rtc_peerconnection.dart b/lib/src/native/rtc_peerconnection_impl.dart similarity index 84% rename from lib/src/rtc_peerconnection.dart rename to lib/src/native/rtc_peerconnection_impl.dart index 46dccd31ee..4724502993 100644 --- a/lib/src/rtc_peerconnection.dart +++ b/lib/src/native/rtc_peerconnection_impl.dart @@ -2,46 +2,33 @@ import 'dart:async'; import 'package:flutter/services.dart'; -import 'enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'rtc_data_channel.dart'; -import 'rtc_dtmf_sender.dart'; -import 'rtc_ice_candidate.dart'; -import 'rtc_rtp_receiver.dart'; -import 'rtc_rtp_sender.dart'; -import 'rtc_rtp_transceiver.dart'; -import 'rtc_session_description.dart'; -import 'rtc_stats_report.dart'; -import 'rtc_track_event.dart'; +import '../interface/enums.dart'; +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import '../interface/rtc_data_channel.dart'; +import '../interface/rtc_dtmf_sender.dart'; +import '../interface/rtc_ice_candidate.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_sender.dart'; +import '../interface/rtc_rtp_transceiver.dart'; +import '../interface/rtc_session_description.dart'; +import '../interface/rtc_stats_report.dart'; +import '../interface/rtc_track_event.dart'; +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_data_channel_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; import 'utils.dart'; -/* - * Delegate for PeerConnection. 
- */ -typedef SignalingStateCallback = void Function(RTCSignalingState state); -typedef PeerConnectionStateCallback = void Function( - RTCPeerConnectionState state); -typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); -typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); -typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); -typedef AddStreamCallback = void Function(MediaStream stream); -typedef RemoveStreamCallback = void Function(MediaStream stream); -typedef AddTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RemoveTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); -typedef RenegotiationNeededCallback = void Function(); - -/// Unified-Plan -typedef UnifiedPlanTrackCallback = void Function(RTCTrackEvent event); - /* * PeerConnection */ -class RTCPeerConnection { - RTCPeerConnection(this._peerConnectionId, this._configuration) { +class RTCPeerConnectionNative extends RTCPeerConnection { + RTCPeerConnectionNative(this._peerConnectionId, this._configuration) { _eventSubscription = _eventChannelFor(_peerConnectionId) .receiveBroadcastStream() .listen(eventListener, onError: errorListener); @@ -56,27 +43,12 @@ class RTCPeerConnection { final List _senders = []; final List _receivers = []; final List _transceivers = []; - RTCDataChannel _dataChannel; + RTCDataChannelNative _dataChannel; Map _configuration; RTCSignalingState _signalingState; - RTCPeerConnectionState _connectionState; RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; - // public: delegate - SignalingStateCallback onSignalingState; - PeerConnectionStateCallback onConnectionState; - IceGatheringStateCallback onIceGatheringState; - IceConnectionStateCallback onIceConnectionState; - IceCandidateCallback onIceCandidate; - AddStreamCallback onAddStream; - RemoveStreamCallback onRemoveStream; - AddTrackCallback onAddTrack; - RemoveTrackCallback onRemoveTrack; - RTCDataChannelCallback onDataChannel; - RenegotiationNeededCallback onRenegotiationNeeded; - - /// Unified-Plan - UnifiedPlanTrackCallback onTrack; + RTCPeerConnectionState _connectionState; final Map defaultSdpConstraints = { 'mandatory': { @@ -86,12 +58,16 @@ class RTCPeerConnection { 'optional': [], }; + @override RTCSignalingState get signalingState => _signalingState; + @override RTCIceGatheringState get iceGatheringState => _iceGatheringState; + @override RTCIceConnectionState get iceConnectionState => _iceConnectionState; + @override RTCPeerConnectionState get connectionState => _connectionState; Future get localDescription => getLocalDescription(); @@ -132,7 +108,7 @@ class RTCPeerConnection { var stream = _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - var newStream = MediaStream(streamId, _peerConnectionId); + var newStream = MediaStreamNative(streamId, _peerConnectionId); newStream.setMediaTracks(map['audioTracks'], map['videoTracks']); return newStream; }); @@ -153,13 +129,13 @@ class RTCPeerConnection { String streamId = map['streamId']; Map track = map['track']; - var newTrack = MediaStreamTrack( + var newTrack = MediaStreamTrackNative( map['trackId'], track['label'], track['kind'], track['enabled']); String kind = track['kind']; var stream = _remoteStreams.firstWhere((it) => it.id == streamId, orElse: () { - var newStream = MediaStream(streamId, _peerConnectionId); + var newStream = 
MediaStreamNative(streamId, _peerConnectionId); _remoteStreams.add(newStream); return newStream; }); @@ -183,14 +159,15 @@ class RTCPeerConnection { return null; }); Map track = map['track']; - var oldTrack = MediaStreamTrack( + var oldTrack = MediaStreamTrackNative( map['trackId'], track['label'], track['kind'], track['enabled']); onRemoveTrack?.call(stream, oldTrack); break; case 'didOpenDataChannel': int dataChannelId = map['id']; String label = map['label']; - _dataChannel = RTCDataChannel(_peerConnectionId, label, dataChannelId); + _dataChannel = + RTCDataChannelNative(_peerConnectionId, label, dataChannelId); onDataChannel?.call(_dataChannel); break; case 'onRenegotiationNeeded': @@ -200,11 +177,12 @@ class RTCPeerConnection { /// Unified-Plan case 'onTrack': var params = map['streams'] as List; - var streams = params.map((e) => MediaStream.fromMap(e)).toList(); + var streams = params.map((e) => MediaStreamNative.fromMap(e)).toList(); onTrack?.call(RTCTrackEvent( - receiver: RTCRtpReceiver.fromMap(map['receiver']), - track: MediaStreamTrack.fromMap(map['track']), - streams: streams)); + receiver: RTCRtpReceiverNative.fromMap(map['receiver']), + streams: streams, + track: MediaStreamTrackNative.fromMap(map['track']), + )); break; /// Other @@ -240,6 +218,7 @@ class RTCPeerConnection { throw e; } + @override Future dispose() async { await _eventSubscription?.cancel(); await _channel.invokeMethod( @@ -252,8 +231,10 @@ class RTCPeerConnection { return EventChannel('FlutterWebRTC/peerConnectoinEvent$peerConnectionId'); } + @override Map get getConfiguration => _configuration; + @override Future setConfiguration(Map configuration) async { _configuration = configuration; try { @@ -266,6 +247,7 @@ class RTCPeerConnection { } } + @override Future createOffer( [Map constraints = const {}]) async { try { @@ -284,6 +266,7 @@ class RTCPeerConnection { } } + @override Future createAnswer( Map constraints) async { try { @@ -301,6 +284,7 @@ class RTCPeerConnection { } } + @override Future addStream(MediaStream stream) async { _localStreams.add(stream); await _channel.invokeMethod('addStream', { @@ -309,6 +293,7 @@ class RTCPeerConnection { }); } + @override Future removeStream(MediaStream stream) async { _localStreams.removeWhere((it) => it.id == stream.id); await _channel.invokeMethod('removeStream', { @@ -317,6 +302,7 @@ class RTCPeerConnection { }); } + @override Future setLocalDescription(RTCSessionDescription description) async { try { await _channel.invokeMethod('setLocalDescription', { @@ -328,6 +314,7 @@ class RTCPeerConnection { } } + @override Future setRemoteDescription(RTCSessionDescription description) async { try { await _channel.invokeMethod('setRemoteDescription', { @@ -339,6 +326,7 @@ class RTCPeerConnection { } } + @override Future getLocalDescription() async { try { final response = await _channel.invokeMethod>( @@ -353,6 +341,7 @@ class RTCPeerConnection { } } + @override Future getRemoteDescription() async { try { final response = await _channel.invokeMethod>( @@ -367,6 +356,7 @@ class RTCPeerConnection { } } + @override Future addCandidate(RTCIceCandidate candidate) async { await _channel.invokeMethod('addCandidate', { 'peerConnectionId': _peerConnectionId, @@ -374,6 +364,7 @@ class RTCPeerConnection { }); } + @override Future> getStats([MediaStreamTrack track]) async { try { final response = await _channel.invokeMethod>( @@ -395,14 +386,17 @@ class RTCPeerConnection { } } + @override List getLocalStreams() { return _localStreams; } + @override List getRemoteStreams() { return 
_remoteStreams; } + @override Future createDataChannel( String label, RTCDataChannelInit dataChannelDict) async { try { @@ -413,24 +407,40 @@ class RTCPeerConnection { 'dataChannelDict': dataChannelDict.toMap() }); _dataChannel = - RTCDataChannel(_peerConnectionId, label, dataChannelDict.id); + RTCDataChannelNative(_peerConnectionId, label, dataChannelDict.id); return _dataChannel; } on PlatformException catch (e) { throw 'Unable to RTCPeerConnection::createDataChannel: ${e.message}'; } } + @override RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - return RTCDTMFSender(_peerConnectionId, ''); + return RTCDTMFSenderNative(_peerConnectionId, ''); + } + + @override + Future close() async { + try { + await _channel.invokeMethod('peerConnectionClose', { + 'peerConnectionId': _peerConnectionId, + }); + } on PlatformException catch (e) { + throw 'Unable to RTCPeerConnection::close: ${e.message}'; + } } /// Unified-Plan. + /// @override List get senders => _senders; + @override List get receivers => _receivers; + @override List get transceivers => _transceivers; + @override Future createSender(String kind, String streamId) async { try { final response = await _channel.invokeMethod( @@ -439,7 +449,7 @@ class RTCPeerConnection { 'kind': kind, 'streamId': streamId }); - var sender = RTCRtpSender.fromMap(response); + var sender = RTCRtpSenderNative.fromMap(response); _senders.add(sender); return sender; } on PlatformException catch (e) { @@ -447,6 +457,7 @@ class RTCPeerConnection { } } + @override Future addTrack(MediaStreamTrack track, [List streams]) async { try { @@ -456,7 +467,7 @@ class RTCPeerConnection { 'trackId': track.id, 'streamIds': streams.map((e) => e.id).toList() }); - var sender = RTCRtpSender.fromMap(response); + var sender = RTCRtpSenderNative.fromMap(response); _senders.add(sender); return sender; } on PlatformException catch (e) { @@ -464,6 +475,7 @@ class RTCPeerConnection { } } + @override Future removeTrack(RTCRtpSender sender) async { try { final response = await _channel.invokeMethod( @@ -481,6 +493,7 @@ class RTCPeerConnection { } } + @override Future closeSender(RTCRtpSender sender) async { try { final response = await _channel.invokeMethod( @@ -498,6 +511,7 @@ class RTCPeerConnection { } } + @override Future addTransceiver( {MediaStreamTrack track, RTCRtpMediaType kind, @@ -508,9 +522,10 @@ class RTCPeerConnection { 'peerConnectionId': _peerConnectionId, if (track != null) 'trackId': track.id, if (kind != null) 'mediaType': typeRTCRtpMediaTypetoString[kind], - if (init != null) 'transceiverInit': init.toMap() + if (init != null) + 'transceiverInit': RTCRtpTransceiverInitNative.initToMap(init) }); - var transceiver = RTCRtpTransceiver.fromMap(response, + var transceiver = RTCRtpTransceiverNative.fromMap(response, peerConnectionId: _peerConnectionId); _transceivers.add(transceiver); return transceiver; @@ -518,14 +533,4 @@ class RTCPeerConnection { throw 'Unable to RTCPeerConnection::addTransceiver: ${e.message}'; } } - - Future close() async { - try { - await _channel.invokeMethod('peerConnectionClose', { - 'peerConnectionId': _peerConnectionId, - }); - } on PlatformException catch (e) { - throw 'Unable to RTCPeerConnection::close: ${e.message}'; - } - } } diff --git a/lib/src/native/rtc_rtp_receiver_impl.dart b/lib/src/native/rtc_rtp_receiver_impl.dart new file mode 100644 index 0000000000..e6f381cfce --- /dev/null +++ b/lib/src/native/rtc_rtp_receiver_impl.dart @@ -0,0 +1,36 @@ +import 'dart:async'; + +import '../interface/media_stream_track.dart'; +import 
'../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import 'media_stream_track_impl.dart'; + +class RTCRtpReceiverNative extends RTCRtpReceiver { + RTCRtpReceiverNative(this._id, this._track, this._parameters); + + factory RTCRtpReceiverNative.fromMap(Map map) { + var track = MediaStreamTrackNative.fromMap(map['track']); + var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); + return RTCRtpReceiverNative(map['receiverId'], track, parameters); + } + + /// private: + String _id; + MediaStreamTrack _track; + RTCRtpParameters _parameters; + + /// The WebRTC specification only defines RTCRtpParameters in terms of senders, + /// but this API also applies them to receivers, similar to ORTC: + /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. + @override + RTCRtpParameters get parameters => _parameters; + + @override + MediaStreamTrack get track => _track; + + @override + String get receiverId => _id; + + @override + Future dispose() async {} +} diff --git a/lib/src/rtc_rtp_sender.dart b/lib/src/native/rtc_rtp_sender_impl.dart similarity index 76% rename from lib/src/rtc_rtp_sender.dart rename to lib/src/native/rtc_rtp_sender_impl.dart index f5d0524c7f..48b053333f 100644 --- a/lib/src/rtc_rtp_sender.dart +++ b/lib/src/native/rtc_rtp_sender_impl.dart @@ -1,21 +1,26 @@ import 'dart:async'; + +import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; -import 'media_stream_track.dart'; -import 'rtc_dtmf_sender.dart'; -import 'rtc_rtp_parameters.dart'; +import '../interface/media_stream_track.dart'; +import '../interface/rtc_dtmf_sender.dart'; +import '../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_sender.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; import 'utils.dart'; -class RTCRtpSender { - RTCRtpSender(this._id, this._track, this._dtmf, this._parameters, +class RTCRtpSenderNative extends RTCRtpSender { + RTCRtpSenderNative(this._id, this._track, this._dtmf, this._parameters, this._ownsTrack, this._peerConnectionId); - factory RTCRtpSender.fromMap(Map map, + factory RTCRtpSenderNative.fromMap(Map map, {String peerConnectionId}) { - return RTCRtpSender( + return RTCRtpSenderNative( map['senderId'], - MediaStreamTrack.fromMap(map['track']), - RTCDTMFSender(peerConnectionId, map['senderId']), + MediaStreamTrackNative.fromMap(map['track']), + RTCDTMFSenderNative(peerConnectionId, map['senderId']), RTCRtpParameters.fromMap(map['rtpParameters']), map['ownsTrack'], peerConnectionId); @@ -33,6 +38,7 @@ class RTCRtpSender { _peerConnectionId = id; } + @override Future setParameters(RTCRtpParameters parameters) async { _parameters = parameters; try { @@ -48,6 +54,7 @@ class RTCRtpSender { } } + @override Future replaceTrack(MediaStreamTrack track) async { try { await _channel.invokeMethod('rtpSenderReplaceTrack', { @@ -60,6 +67,7 @@ class RTCRtpSender { } } + @override Future setTrack(MediaStreamTrack track, {bool takeOwnership = true}) async { try { @@ -74,16 +82,23 @@ class RTCRtpSender { } } + @override RTCRtpParameters get parameters => _parameters; + @override MediaStreamTrack get track => _track; + @override String get senderId => _id; + @override bool get ownsTrack => _ownsTrack; + @override RTCDTMFSender get dtmfSender => _dtmf; + @override + @mustCallSuper Future dispose() async { try { await _channel.invokeMethod('rtpSenderDispose', { diff --git a/lib/src/rtc_rtp_transceiver.dart b/lib/src/native/rtc_rtp_transceiver_impl.dart similarity index 57% 
rename from lib/src/rtc_rtp_transceiver.dart rename to lib/src/native/rtc_rtp_transceiver_impl.dart index 37794f0291..35600e718a 100644 --- a/lib/src/rtc_rtp_transceiver.dart +++ b/lib/src/native/rtc_rtp_transceiver_impl.dart @@ -1,53 +1,40 @@ import 'dart:async'; import 'package:flutter/services.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; -import 'rtc_rtp_parameters.dart'; -import 'rtc_rtp_receiver.dart'; -import 'rtc_rtp_sender.dart'; +import '../interface/enums.dart'; +import '../interface/media_stream.dart'; +import '../interface/rtc_rtp_parameters.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_sender.dart'; +import '../interface/rtc_rtp_transceiver.dart'; +import 'media_stream_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_sender_impl.dart'; import 'utils.dart'; -enum TransceiverDirection { - SendRecv, - SendOnly, - RecvOnly, - Inactive, -} - -final typeStringToRtpTransceiverDirection = { - 'sendrecv': TransceiverDirection.SendRecv, - 'sendonly': TransceiverDirection.SendOnly, - 'recvonly': TransceiverDirection.RecvOnly, - 'inactive': TransceiverDirection.Inactive, -}; - -final typeRtpTransceiverDirectionToString = { - TransceiverDirection.SendRecv: 'sendrecv', - TransceiverDirection.SendOnly: 'sendonly', - TransceiverDirection.RecvOnly: 'recvonly', - TransceiverDirection.Inactive: 'inactive', -}; - List listToRtpEncodings(List> list) { return list.map((e) => RTCRtpEncoding.fromMap(e)).toList(); } -class RTCRtpTransceiverInit { - RTCRtpTransceiverInit({this.direction, this.sendEncodings, this.streams}); +class RTCRtpTransceiverInitNative extends RTCRtpTransceiverInit { + RTCRtpTransceiverInitNative(TransceiverDirection direction, + List streams, List sendEncodings) + : super( + direction: direction, + streams: streams, + sendEncodings: sendEncodings); - factory RTCRtpTransceiverInit.fromMap(Map map) { - return RTCRtpTransceiverInit( - direction: typeStringToRtpTransceiverDirection[map['direction']], - sendEncodings: listToRtpEncodings(map['sendEncodings']), - streams: (map['streams'] as List) - .map((e) => MediaStream.fromMap(map)) - .toList()); + factory RTCRtpTransceiverInitNative.fromMap(Map map) { + return RTCRtpTransceiverInitNative( + typeStringToRtpTransceiverDirection[map['direction']], + (map['streams'] as List) + .map((e) => MediaStreamNative.fromMap(map)) + .toList(), + listToRtpEncodings(map['sendEncodings'])); } - TransceiverDirection direction; - List streams; - List sendEncodings; + @override Map toMap() { return { 'direction': typeRtpTransceiverDirectionToString[direction], @@ -56,20 +43,31 @@ class RTCRtpTransceiverInit { 'sendEncodings': sendEncodings.map((e) => e.toMap()).toList(), }; } + + static Map initToMap(RTCRtpTransceiverInit init) { + return { + 'direction': typeRtpTransceiverDirectionToString[init.direction], + if (init.streams != null) + 'streamIds': init.streams.map((e) => e.id).toList(), + if (init.sendEncodings != null) + 'sendEncodings': init.sendEncodings.map((e) => e.toMap()).toList(), + }; + } } -class RTCRtpTransceiver { - RTCRtpTransceiver(this._id, this._direction, this._mid, this._sender, +class RTCRtpTransceiverNative extends RTCRtpTransceiver { + RTCRtpTransceiverNative(this._id, this._direction, this._mid, this._sender, this._receiver, _peerConnectionId); - factory RTCRtpTransceiver.fromMap(Map map, + factory RTCRtpTransceiverNative.fromMap(Map map, {String peerConnectionId}) { - var transceiver = RTCRtpTransceiver( + var transceiver = RTCRtpTransceiverNative( 
map['transceiverId'], typeStringToRtpTransceiverDirection[map['direction']], map['mid'], - RTCRtpSender.fromMap(map['sender'], peerConnectionId: peerConnectionId), - RTCRtpReceiver.fromMap(map['receiver']), + RTCRtpSenderNative.fromMap(map['sender'], + peerConnectionId: peerConnectionId), + RTCRtpReceiverNative.fromMap(map['receiver']), peerConnectionId); return transceiver; } @@ -87,18 +85,25 @@ class RTCRtpTransceiver { _peerConnectionId = id; } + @override TransceiverDirection get currentDirection => _direction; + @override String get mid => _mid; + @override RTCRtpSender get sender => _sender; + @override RTCRtpReceiver get receiver => _receiver; + @override bool get stoped => _stop; + @override String get transceiverId => _id; + @override Future setDirection(TransceiverDirection direction) async { try { await _channel @@ -112,6 +117,7 @@ class RTCRtpTransceiver { } } + @override Future getCurrentDirection() async { try { final response = await _channel.invokeMethod( @@ -126,6 +132,7 @@ class RTCRtpTransceiver { } } + @override Future stop() async { try { await _channel.invokeMethod('rtpTransceiverStop', { diff --git a/lib/src/native/rtc_track_event_impl.dart b/lib/src/native/rtc_track_event_impl.dart new file mode 100644 index 0000000000..d35a0a2b2e --- /dev/null +++ b/lib/src/native/rtc_track_event_impl.dart @@ -0,0 +1,32 @@ +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import '../interface/rtc_rtp_receiver.dart'; +import '../interface/rtc_rtp_transceiver.dart'; +import '../interface/rtc_track_event.dart'; +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_rtp_receiver_impl.dart'; +import 'rtc_rtp_transceiver_impl.dart'; + +class RTCTrackEventNative extends RTCTrackEvent { + RTCTrackEventNative(RTCRtpReceiver receiver, List streams, + MediaStreamTrack track, RTCRtpTransceiver transceiver) + : super( + receiver: receiver, + streams: streams, + track: track, + transceiver: transceiver); + + factory RTCTrackEventNative.fromMap( + Map map, String peerConnectionId) { + var streamsParams = map['streams'] as List>; + var streams = + streamsParams.map((e) => MediaStreamNative.fromMap(e)).toList(); + return RTCTrackEventNative( + RTCRtpReceiverNative.fromMap(map['receiver']), + streams, + MediaStreamTrackNative.fromMap(map['track']), + RTCRtpTransceiverNative.fromMap(map['transceiver'], + peerConnectionId: peerConnectionId)); + } +} diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart new file mode 100644 index 0000000000..91b129818a --- /dev/null +++ b/lib/src/native/rtc_video_renderer_impl.dart @@ -0,0 +1,92 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +import '../interface/media_stream.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'utils.dart'; + +class RTCVideoRendererNative extends VideoRenderer { + RTCVideoRendererNative(); + final _channel = WebRTC.methodChannel(); + int _textureId; + MediaStream _srcObject; + StreamSubscription _eventSubscription; + + @override + Future initialize() async { + final response = await _channel + .invokeMethod>('createVideoRenderer', {}); + _textureId = response['textureId']; + _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + @override + int get textureId => _textureId; + + @override + MediaStream get srcObject => _srcObject; + + @override + set srcObject(MediaStream stream) { + if 
(textureId == null) throw 'Call initialize before setting the stream'; + + _srcObject = stream; + _channel.invokeMethod('videoRendererSetSrcObject', { + 'textureId': textureId, + 'streamId': stream?.id ?? '', + 'ownerTag': stream?.ownerTag ?? '' + }).then((_) { + value = (stream == null) + ? RTCVideoValue.empty + : value.copyWith(renderVideo: renderVideo); + }); + } + + @override + Future dispose() async { + await _eventSubscription?.cancel(); + await _channel.invokeMethod( + 'videoRendererDispose', + {'textureId': _textureId}, + ); + + return super.dispose(); + } + + void eventListener(dynamic event) { + final Map map = event; + switch (map['event']) { + case 'didTextureChangeRotation': + value = + value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); + break; + case 'didTextureChangeVideoSize': + value = value.copyWith( + width: 0.0 + map['width'], + height: 0.0 + map['height'], + renderVideo: renderVideo); + break; + case 'didFirstFrameRendered': + break; + } + } + + void errorListener(Object obj) { + final PlatformException e = obj; + throw e; + } + + @override + bool get renderVideo => srcObject != null; + + @override + bool get muted => throw UnimplementedError(); + + @override + set muted(bool mute) { + throw UnimplementedError(); + } +} diff --git a/lib/src/native/rtc_video_view_impl.dart b/lib/src/native/rtc_video_view_impl.dart new file mode 100644 index 0000000000..faa4a34bfa --- /dev/null +++ b/lib/src/native/rtc_video_view_impl.dart @@ -0,0 +1,67 @@ +import 'dart:math'; + +import 'package:flutter/material.dart'; + +import '../interface/enums.dart'; +import '../interface/rtc_video_renderer.dart'; +import '../rtc_video_renderer.dart'; +import 'rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatelessWidget { + RTCVideoView( + this._renderer, { + Key key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + }) : assert(objectFit != null), + assert(mirror != null), + super(key: key); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + + RTCVideoRendererNative get videoRenderer => + _renderer.delegate as RTCVideoRendererNative; + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) => + _buildVideoView(constraints)); + } + + Widget _buildVideoView(BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: FittedBox( + fit: objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: ValueListenableBuilder( + valueListenable: videoRenderer, + builder: + (BuildContext context, RTCVideoValue value, Widget child) { + return SizedBox( + width: constraints.maxHeight * value.aspectRatio, + height: constraints.maxHeight, + child: value.renderVideo ? child : Container(), + ); + }, + child: Transform( + transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), + alignment: FractionalOffset.center, + child: videoRenderer.textureId != null + ? 
Texture(textureId: videoRenderer.textureId) + : Container(), + ), + ), + ), + ), + ), + ); + } +} diff --git a/lib/src/utils.dart b/lib/src/native/utils.dart similarity index 100% rename from lib/src/utils.dart rename to lib/src/native/utils.dart diff --git a/lib/src/rtc_data_channel.dart b/lib/src/rtc_data_channel.dart deleted file mode 100644 index b8f523cd38..0000000000 --- a/lib/src/rtc_data_channel.dart +++ /dev/null @@ -1,184 +0,0 @@ -import 'dart:async'; -import 'dart:typed_data'; - -import 'package:flutter/services.dart'; - -import 'enums.dart'; -import 'utils.dart'; - -final _typeStringToMessageType = { - 'text': MessageType.text, - 'binary': MessageType.binary -}; - -/// Initialization parameters for [RTCDataChannel]. -class RTCDataChannelInit { - bool ordered = true; - int maxRetransmitTime = -1; - int maxRetransmits = -1; - String protocol = 'sctp'; //sctp | quic - bool negotiated = false; - int id = 0; - Map toMap() { - return { - 'ordered': ordered, - 'maxRetransmitTime': maxRetransmitTime, - 'maxRetransmits': maxRetransmits, - 'protocol': protocol, - 'negotiated': negotiated, - 'id': id - }; - } -} - -/// A class that represents a datachannel message. -/// Can either contain binary data as a [Uint8List] or -/// text data as a [String]. -class RTCDataChannelMessage { - /// Construct a text message with a [String]. - RTCDataChannelMessage(String text) { - _data = text; - _isBinary = false; - } - - /// Construct a binary message with a [Uint8List]. - RTCDataChannelMessage.fromBinary(Uint8List binary) { - _data = binary; - _isBinary = true; - } - - dynamic _data; - bool _isBinary; - - /// Tells whether this message contains binary. - /// If this is false, it's a text message. - bool get isBinary => _isBinary; - - MessageType get type => isBinary ? MessageType.binary : MessageType.text; - - /// Text contents of this message as [String]. - /// Use only on text messages. - /// See: [isBinary]. - String get text => _data; - - /// Binary contents of this message as [Uint8List]. - /// Use only on binary messages. - /// See: [isBinary]. - Uint8List get binary => _data; -} - -typedef RTCDataChannelStateCallback = void Function(RTCDataChannelState state); -typedef RTCDataChannelOnMessageCallback = void Function( - RTCDataChannelMessage message); - -/// A class that represents a WebRTC datachannel. -/// Can send and receive text and binary messages. -class RTCDataChannel { - RTCDataChannel(this._peerConnectionId, this._label, this._dataChannelId) { - stateChangeStream = _stateChangeController.stream; - messageStream = _messageController.stream; - _eventSubscription = _eventChannelFor(_peerConnectionId, _dataChannelId) - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - final String _peerConnectionId; - final String _label; - final int _dataChannelId; - RTCDataChannelState _state; - final _channel = WebRTC.methodChannel(); - StreamSubscription _eventSubscription; - - /// Get current state. - RTCDataChannelState get state => _state; - - /// Get label. - String get label => _label; - - /// Event handler for datachannel state changes. - /// Assign this property to listen for state changes. - /// Will be passed one argument, [state], which is an [RTCDataChannelState]. - RTCDataChannelStateCallback onDataChannelState; - - /// Event handler for messages. Assign this property - /// to listen for messages from this [RTCDataChannel]. 
- /// Will be passed a a [message] argument, which is an [RTCDataChannelMessage] that will contain either - /// binary data as a [Uint8List] or text data as a [String]. - RTCDataChannelOnMessageCallback onMessage; - - final _stateChangeController = - StreamController.broadcast(sync: true); - final _messageController = - StreamController.broadcast(sync: true); - - /// Stream of state change events. Emits the new state on change. - /// Closes when the [RTCDataChannel] is closed. - Stream stateChangeStream; - - /// Stream of incoming messages. Emits the message. - /// Closes when the [RTCDataChannel] is closed. - Stream messageStream; - - /// RTCDataChannel event listener. - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'dataChannelStateChanged': - //int dataChannelId = map['id']; - _state = rtcDataChannelStateForString(map['state']); - onDataChannelState?.call(_state); - - _stateChangeController.add(_state); - break; - case 'dataChannelReceiveMessage': - //int dataChannelId = map['id']; - - var type = _typeStringToMessageType[map['type']]; - dynamic data = map['data']; - RTCDataChannelMessage message; - if (type == MessageType.binary) { - message = RTCDataChannelMessage.fromBinary(data); - } else { - message = RTCDataChannelMessage(data); - } - - onMessage?.call(message); - - _messageController.add(message); - break; - } - } - - EventChannel _eventChannelFor(String peerConnectionId, int dataChannelId) { - return EventChannel( - 'FlutterWebRTC/dataChannelEvent$peerConnectionId$dataChannelId'); - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - /// Send a message to this datachannel. - /// To send a text message, use the default constructor to instantiate a text [RTCDataChannelMessage] - /// for the [message] parameter. - /// To send a binary message, pass a binary [RTCDataChannelMessage] - /// constructed with [RTCDataChannelMessage.fromBinary] - Future send(RTCDataChannelMessage message) async { - await _channel.invokeMethod('dataChannelSend', { - 'peerConnectionId': _peerConnectionId, - 'dataChannelId': _dataChannelId, - 'type': message.isBinary ? 'binary' : 'text', - 'data': message.isBinary ? message.binary : message.text, - }); - } - - Future close() async { - await _stateChangeController.close(); - await _messageController.close(); - await _eventSubscription?.cancel(); - await _channel.invokeMethod('dataChannelClose', { - 'peerConnectionId': _peerConnectionId, - 'dataChannelId': _dataChannelId - }); - } -} diff --git a/lib/src/rtc_dtmf_sender.dart b/lib/src/rtc_dtmf_sender.dart deleted file mode 100644 index 057071af84..0000000000 --- a/lib/src/rtc_dtmf_sender.dart +++ /dev/null @@ -1,37 +0,0 @@ -import 'package:flutter/services.dart'; - -import 'utils.dart'; - -class RTCDTMFSender { - RTCDTMFSender(this._peerConnectionId, this._rtpSenderId); - // peer connection Id must be defined as a variable where this function will be called. - final String _rtpSenderId; - final String _peerConnectionId; - final MethodChannel _channel = WebRTC.methodChannel(); - - /// tones:A String containing the DTMF codes to be transmitted to the recipient. - /// Specifying an empty string as the tones parameter clears the tone - /// buffer, aborting any currently queued tones. A "," character inserts - /// a two second delay. - /// duration: This value must be between 40 ms and 6000 ms (6 seconds). - /// The default is 100 ms. - /// interToneGap: The length of time, in milliseconds, to wait between tones. 
- /// The browser will enforce a minimum value of 30 ms (that is, - /// if you specify a lower value, 30 ms will be used instead); - /// the default is 70 ms. - Future insertDTMF(String tones, - {int duration = 100, int interToneGap = 70}) async { - await _channel.invokeMethod('sendDtmf', { - 'peerConnectionId': _peerConnectionId, - 'rtpSenderId': _rtpSenderId, - 'tone': tones, - 'duration': duration, - 'gap': interToneGap, - }); - } - - /// Compatible with old methods - Future sendDtmf(String tones, - {int duration = 100, int interToneGap = 70}) => - insertDTMF(tones, duration: duration, interToneGap: interToneGap); -} diff --git a/lib/src/rtc_peerconnection_factory.dart b/lib/src/rtc_peerconnection_factory.dart deleted file mode 100644 index 30e716b581..0000000000 --- a/lib/src/rtc_peerconnection_factory.dart +++ /dev/null @@ -1,38 +0,0 @@ -import 'dart:async'; - -import 'media_stream.dart'; -import 'rtc_peerconnection.dart'; -import 'utils.dart'; - -Future createPeerConnection( - Map configuration, - [Map constraints = const {}]) async { - var channel = WebRTC.methodChannel(); - - var defaultConstraints = { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - - final response = await channel.invokeMethod>( - 'createPeerConnection', - { - 'configuration': configuration, - 'constraints': constraints.isEmpty ? defaultConstraints : constraints - }, - ); - - String peerConnectionId = response['peerConnectionId']; - return RTCPeerConnection(peerConnectionId, configuration); -} - -Future createLocalMediaStream(String label) async { - var _channel = WebRTC.methodChannel(); - - final response = await _channel - .invokeMethod>('createLocalMediaStream'); - - return MediaStream(response['streamId'], label); -} diff --git a/lib/src/rtc_rtp_receiver.dart b/lib/src/rtc_rtp_receiver.dart deleted file mode 100644 index e4009bb6fe..0000000000 --- a/lib/src/rtc_rtp_receiver.dart +++ /dev/null @@ -1,54 +0,0 @@ -import 'dart:async'; - -import 'media_stream_track.dart'; -import 'rtc_rtp_parameters.dart'; - -enum RTCRtpMediaType { - RTCRtpMediaTypeAudio, - RTCRtpMediaTypeVideo, - RTCRtpMediaTypeData, -} - -final typeRTCRtpMediaTypetoString = { - RTCRtpMediaType.RTCRtpMediaTypeAudio: 'audio', - RTCRtpMediaType.RTCRtpMediaTypeVideo: 'video', - RTCRtpMediaType.RTCRtpMediaTypeData: 'data', -}; - -final typeStringToRTCRtpMediaType = { - 'audio': RTCRtpMediaType.RTCRtpMediaTypeAudio, - 'video': RTCRtpMediaType.RTCRtpMediaTypeVideo, - 'data': RTCRtpMediaType.RTCRtpMediaTypeData, -}; - -typedef OnFirstPacketReceivedCallback = void Function( - RTCRtpReceiver rtpReceiver, RTCRtpMediaType mediaType); - -class RTCRtpReceiver { - RTCRtpReceiver(this._id, this._track, this._parameters); - - factory RTCRtpReceiver.fromMap(Map map) { - var track = MediaStreamTrack.fromMap(map['track']); - var parameters = RTCRtpParameters.fromMap(map['rtpParameters']); - return RTCRtpReceiver(map['receiverId'], track, parameters); - } - - /// private: - String _id; - MediaStreamTrack _track; - RTCRtpParameters _parameters; - - /// public: - OnFirstPacketReceivedCallback onFirstPacketReceived; - - /// The WebRTC specification only defines RTCRtpParameters in terms of senders, - /// but this API also applies them to receivers, similar to ORTC: - /// http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. 
- RTCRtpParameters get parameters => _parameters; - - MediaStreamTrack get track => _track; - - String get receiverId => _id; - - Future dispose() async {} -} diff --git a/lib/src/rtc_stats_report.dart b/lib/src/rtc_stats_report.dart deleted file mode 100644 index 0774e9787c..0000000000 --- a/lib/src/rtc_stats_report.dart +++ /dev/null @@ -1,7 +0,0 @@ -class StatsReport { - StatsReport(this.id, this.type, this.timestamp, this.values); - String id; - String type; - double timestamp; - Map values; -} diff --git a/lib/src/rtc_track_event.dart b/lib/src/rtc_track_event.dart deleted file mode 100644 index 2ef76c16ac..0000000000 --- a/lib/src/rtc_track_event.dart +++ /dev/null @@ -1,23 +0,0 @@ -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'rtc_rtp_receiver.dart'; -import 'rtc_rtp_transceiver.dart'; - -class RTCTrackEvent { - RTCTrackEvent({this.receiver, this.streams, this.track, this.transceiver}); - factory RTCTrackEvent.fromMap( - Map map, String peerConnectionId) { - var streamsParams = map['streams'] as List>; - var streams = streamsParams.map((e) => MediaStream.fromMap(e)).toList(); - return RTCTrackEvent( - receiver: RTCRtpReceiver.fromMap(map['receiver']), - streams: streams, - track: MediaStreamTrack.fromMap(map['track']), - transceiver: RTCRtpTransceiver.fromMap(map['transceiver'], - peerConnectionId: peerConnectionId)); - } - final RTCRtpReceiver receiver; - final List streams; - final MediaStreamTrack track; - final RTCRtpTransceiver transceiver; -} diff --git a/lib/src/rtc_video_renderer.dart b/lib/src/rtc_video_renderer.dart new file mode 100644 index 0000000000..3e6b8b7e4e --- /dev/null +++ b/lib/src/rtc_video_renderer.dart @@ -0,0 +1,28 @@ +import '../flutter_webrtc.dart'; +import 'interface/rtc_video_renderer.dart'; + +class RTCVideoRenderer { + RTCVideoRenderer() : _delegate = videoRenderer(); + + final VideoRenderer _delegate; + + VideoRenderer get delegate => _delegate; + + Future initialize() => _delegate.initialize(); + + bool get renderVideo => _delegate.renderVideo; + + bool get muted => _delegate.muted; + + MediaStream get srcObject => _delegate.srcObject; + + set muted(bool mute) => _delegate.muted = mute; + + set srcObject(MediaStream stream) => _delegate.srcObject = stream; + + int get textureId => _delegate.textureId; + + Future dispose() async { + return _delegate.dispose(); + } +} diff --git a/lib/src/rtc_video_view.dart b/lib/src/rtc_video_view.dart deleted file mode 100644 index 63367715f6..0000000000 --- a/lib/src/rtc_video_view.dart +++ /dev/null @@ -1,181 +0,0 @@ -import 'dart:async'; -import 'dart:math'; - -import 'package:flutter/material.dart'; -import 'package:flutter/services.dart'; - -import 'enums.dart'; -import 'media_stream.dart'; -import 'utils.dart'; - -@immutable -class RTCVideoValue { - const RTCVideoValue({ - this.width = 0.0, - this.height = 0.0, - this.rotation = 0, - this.renderVideo = false, - }); - static const empty = RTCVideoValue(); - final double width; - final double height; - final int rotation; - final bool renderVideo; - double get aspectRatio { - if (width == 0.0 || height == 0.0) { - return 1.0; - } - return (rotation == 90 || rotation == 270) - ? height / width - : width / height; - } - - RTCVideoValue copyWith({ - double width, - double height, - int rotation, - bool renderVideo, - }) { - return RTCVideoValue( - width: width ?? this.width, - height: height ?? this.height, - rotation: rotation ?? this.rotation, - renderVideo: (this.width != 0 && this.height != 0 && renderVideo) ?? 
- this.renderVideo, - ); - } - - @override - String toString() => - '$runtimeType(width: $width, height: $height, rotation: $rotation)'; -} - -class RTCVideoRenderer extends ValueNotifier { - RTCVideoRenderer() : super(RTCVideoValue.empty); - final _channel = WebRTC.methodChannel(); - int _textureId; - MediaStream _srcObject; - StreamSubscription _eventSubscription; - - Future initialize() async { - final response = await _channel - .invokeMethod>('createVideoRenderer', {}); - _textureId = response['textureId']; - _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); - } - - int get textureId => _textureId; - - MediaStream get srcObject => _srcObject; - - set srcObject(MediaStream stream) { - if (textureId == null) throw 'Call initialize before setting the stream'; - _srcObject = stream; - try { - _channel.invokeMethod('videoRendererSetSrcObject', { - 'textureId': textureId, - 'streamId': stream?.id ?? '', - 'ownerTag': stream?.ownerTag ?? '' - }).then((_) { - value = (stream == null) - ? RTCVideoValue.empty - : value.copyWith(renderVideo: renderVideo); - }); - } catch (e) { - print(e.toString()); - } - } - - @override - Future dispose() async { - super.dispose(); - await _eventSubscription?.cancel(); - await _channel.invokeMethod( - 'videoRendererDispose', - {'textureId': _textureId}, - ); - } - - void eventListener(dynamic event) { - final Map map = event; - switch (map['event']) { - case 'didTextureChangeRotation': - value = - value.copyWith(rotation: map['rotation'], renderVideo: renderVideo); - break; - case 'didTextureChangeVideoSize': - value = value.copyWith( - width: 0.0 + map['width'], - height: 0.0 + map['height'], - renderVideo: renderVideo); - break; - case 'didFirstFrameRendered': - break; - } - } - - void errorListener(Object obj) { - final PlatformException e = obj; - throw e; - } - - bool get renderVideo => srcObject != null; -} - -class RTCVideoView extends StatelessWidget { - RTCVideoView( - this._renderer, { - Key key, - this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, - this.mirror = false, - }) : assert(objectFit != null), - assert(mirror != null), - super(key: key); - - final RTCVideoRenderer _renderer; - final RTCVideoViewObjectFit objectFit; - final bool mirror; - - @override - Widget build(BuildContext context) { - return LayoutBuilder( - builder: (BuildContext context, BoxConstraints constraints) => - _buildVideoView(constraints)); - } - - Widget _buildVideoView(BoxConstraints constraints) { - return Center( - child: Container( - width: constraints.maxWidth, - height: constraints.maxHeight, - child: FittedBox( - fit: objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain - ? BoxFit.contain - : BoxFit.cover, - child: Center( - child: ValueListenableBuilder( - valueListenable: _renderer, - builder: - (BuildContext context, RTCVideoValue value, Widget child) { - return SizedBox( - width: constraints.maxHeight * value.aspectRatio, - height: constraints.maxHeight, - child: value.renderVideo ? child : Container(), - ); - }, - child: Transform( - transform: Matrix4.identity()..rotateY(mirror ? -pi : 0.0), - alignment: FractionalOffset.center, - child: _renderer.textureId != null - ? 
Texture(textureId: _renderer.textureId) - : Container(), - ), - ), - ), - ), - ), - ); - } -} diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart new file mode 100644 index 0000000000..12fbf07927 --- /dev/null +++ b/lib/src/web/factory_impl.dart @@ -0,0 +1,59 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:html' as html; + +import 'package:flutter_webrtc/src/interface/rtc_dtmf_sender.dart'; +import 'package:flutter_webrtc/src/web/rtc_dtmf_sender_impl.dart'; + +import '../interface/factory.dart'; +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/navigator.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'media_recorder_impl.dart'; +import 'media_stream_impl.dart'; +import 'navigator_impl.dart'; +import 'rtc_peerconnection_impl.dart'; +import 'rtc_video_renderer_impl.dart'; + +class RTCFactoryWeb extends RTCFactory { + RTCFactoryWeb._internal(); + static final instance = RTCFactoryWeb._internal(); + + @override + Future createPeerConnection( + Map configuration, + [Map constraints]) async { + final constr = (constraints != null && constraints.isNotEmpty) + ? constraints + : { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': true}, + ], + }; + final jsRtcPc = html.RtcPeerConnection(configuration, constr); + final _peerConnectionId = base64Encode(jsRtcPc.toString().codeUnits); + return RTCPeerConnectionWeb(_peerConnectionId, jsRtcPc); + } + + @override + Future createLocalMediaStream(String label) async { + final jsMs = html.MediaStream(); + return MediaStreamWeb(jsMs, 'local'); + } + + @override + MediaRecorder mediaRecorder() { + return MediaRecorderWeb(); + } + + @override + VideoRenderer videoRenderer() { + return RTCVideoRendererWeb(); + } + + @override + Navigator get navigator => NavigatorWeb(); +} diff --git a/lib/src/web/media_recorder.dart b/lib/src/web/media_recorder_impl.dart similarity index 79% rename from lib/src/web/media_recorder.dart rename to lib/src/web/media_recorder_impl.dart index 8939eb0c29..d590cdb6dd 100644 --- a/lib/src/web/media_recorder.dart +++ b/lib/src/web/media_recorder_impl.dart @@ -2,16 +2,17 @@ import 'dart:async'; import 'dart:html' as html; import 'dart:js' as js; -import '../enums.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; +import '../interface/enums.dart'; +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import 'media_stream_impl.dart'; -class MediaRecorder { +class MediaRecorderWeb extends MediaRecorder { html.MediaRecorder _recorder; Completer _completer; - /// For Android use audioChannel param - /// For iOS use audioTrack + @override Future start( String path, { MediaStreamTrack videoTrack, @@ -22,13 +23,14 @@ class MediaRecorder { throw 'Use startWeb on Flutter Web!'; } - /// Only for Flutter Web + @override void startWeb( MediaStream stream, { Function(dynamic blob, bool isLastOne) onDataChunk, String mimeType = 'video/webm', }) { - _recorder = html.MediaRecorder(stream.jsStream, {'mimeType': mimeType}); + var _native = stream as MediaStreamWeb; + _recorder = html.MediaRecorder(_native.jsStream, {'mimeType': mimeType}); if (onDataChunk == null) { var _chunks = []; _completer = Completer(); @@ -58,6 +60,7 @@ class MediaRecorder { _recorder.start(); } + @override Future stop() { _recorder?.stop(); return _completer?.future ?? 
Future.value(); diff --git a/lib/src/web/media_stream.dart b/lib/src/web/media_stream.dart deleted file mode 100644 index a0816be421..0000000000 --- a/lib/src/web/media_stream.dart +++ /dev/null @@ -1,47 +0,0 @@ -import 'dart:async'; -import 'dart:html' as html; - -import 'media_stream_track.dart'; - -class MediaStream { - MediaStream(this.jsStream, this._ownerTag); - final html.MediaStream jsStream; - final String _ownerTag; - - String get id => jsStream.id; - - String get ownerTag => _ownerTag; - - Future getMediaTracks() { - return Future.value(); - } - - Future addTrack(MediaStreamTrack track, {bool addToNative = true}) { - if (addToNative) { - jsStream.addTrack(track.jsTrack); - } - return Future.value(); - } - - Future removeTrack(MediaStreamTrack track, - {bool removeFromNative = true}) async { - if (removeFromNative) { - jsStream.removeTrack(track.jsTrack); - } - } - - List getAudioTracks() => jsStream - .getAudioTracks() - .map((jsTrack) => MediaStreamTrack(jsTrack)) - .toList(); - - List getVideoTracks() => jsStream - .getVideoTracks() - .map((jsTrack) => MediaStreamTrack(jsTrack)) - .toList(); - - Future dispose() async { - jsStream.getAudioTracks().forEach((track) => track.stop()); - jsStream.getVideoTracks().forEach((track) => track.stop()); - } -} diff --git a/lib/src/web/media_stream_impl.dart b/lib/src/web/media_stream_impl.dart new file mode 100644 index 0000000000..8a84282738 --- /dev/null +++ b/lib/src/web/media_stream_impl.dart @@ -0,0 +1,58 @@ +import 'dart:async'; +import 'dart:html' as html; + +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import 'media_stream_track_impl.dart'; + +class MediaStreamWeb extends MediaStream { + MediaStreamWeb(this.jsStream, String ownerTag) : super(jsStream.id, ownerTag); + final html.MediaStream jsStream; + + @override + Future getMediaTracks() { + return Future.value(); + } + + @override + Future addTrack(MediaStreamTrack track, {bool addToNative = true}) { + if (addToNative) { + var _native = track as MediaStreamTrackWeb; + jsStream.addTrack(_native.jsTrack); + } + return Future.value(); + } + + @override + Future removeTrack(MediaStreamTrack track, + {bool removeFromNative = true}) async { + if (removeFromNative) { + var _native = track as MediaStreamTrackWeb; + jsStream.removeTrack(_native.jsTrack); + } + } + + @override + List getAudioTracks() => jsStream + .getAudioTracks() + .map((jsTrack) => MediaStreamTrackWeb(jsTrack)) + .toList(); + + @override + List getVideoTracks() => jsStream + .getVideoTracks() + .map((jsTrack) => MediaStreamTrackWeb(jsTrack)) + .toList(); + + @override + Future dispose() async { + jsStream.getAudioTracks().forEach((track) => track.stop()); + jsStream.getVideoTracks().forEach((track) => track.stop()); + return super.dispose(); + } + + @override + List getTracks() { + return [...getAudioTracks(), ...getVideoTracks()]; + } +} diff --git a/lib/src/web/media_stream_track.dart b/lib/src/web/media_stream_track_impl.dart similarity index 71% rename from lib/src/web/media_stream_track.dart rename to lib/src/web/media_stream_track_impl.dart index a2d2ea8365..dcf57714f7 100644 --- a/lib/src/web/media_stream_track.dart +++ b/lib/src/web/media_stream_track_impl.dart @@ -2,32 +2,42 @@ import 'dart:async'; import 'dart:html' as html; import 'dart:js' as js; -class MediaStreamTrack { - const MediaStreamTrack(this.jsTrack); +import '../interface/media_stream_track.dart'; + +class MediaStreamTrackWeb extends MediaStreamTrack { + MediaStreamTrackWeb(this.jsTrack); final 
html.MediaStreamTrack jsTrack; - set enabled(bool enabled) => jsTrack.enabled = enabled; + @override + String get id => jsTrack.id; - bool get enabled => jsTrack.enabled; + @override + String get kind => jsTrack.kind; + @override String get label => jsTrack.label; - String get kind => jsTrack.kind; + @override + bool get enabled => jsTrack.enabled; - String get id => jsTrack.id; + @override + set enabled(bool b) { + jsTrack.enabled = b; + } - ///Future contains isFrontCamera - ///Throws error if switching camera failed + @override Future switchCamera() async { // TODO(cloudwebrtc): ??? return false; } + @override Future adaptRes(int width, int height) async { // TODO(cloudwebrtc): ??? } + @override void setVolume(double volume) { final constraints = jsTrack.getConstraints(); constraints['volume'] = volume; @@ -35,14 +45,17 @@ class MediaStreamTrack { .callMethod('applyConstraints', [js.JsObject.jsify(constraints)]); } + @override void setMicrophoneMute(bool mute) { jsTrack.enabled = !mute; } + @override void enableSpeakerphone(bool enable) { // Should this throw error? } + @override Future captureFrame([String filePath]) async { final imageCapture = html.ImageCapture(jsTrack); final bitmap = await imageCapture.grabFrame(); @@ -57,8 +70,19 @@ class MediaStreamTrack { return dataUrl; } + @override Future dispose() { jsTrack.stop(); - return Future.value(); + return super.dispose(); + } + + @override + Future hasTorch() { + return Future.value(false); + } + + @override + Future setTorch(bool torch) { + throw UnimplementedError('The web implementation does not support torch'); } } diff --git a/lib/src/web/get_user_media.dart b/lib/src/web/mediadevices_impl.dart similarity index 79% rename from lib/src/web/get_user_media.dart rename to lib/src/web/mediadevices_impl.dart index 2831da1493..702e36b83c 100644 --- a/lib/src/web/get_user_media.dart +++ b/lib/src/web/mediadevices_impl.dart @@ -3,10 +3,13 @@ import 'dart:html' as html; import 'dart:js'; import 'dart:js_util' as jsutil; -import 'media_stream.dart'; +import '../interface/media_stream.dart'; +import '../interface/mediadevices.dart'; +import 'media_stream_impl.dart'; -class MediaDevices { - static Future getUserMedia( +class MediaDevicesWeb extends MediaDevices { + @override + Future getUserMedia( Map mediaConstraints) async { mediaConstraints ??= {}; @@ -22,13 +25,14 @@ class MediaDevices { final mediaDevices = html.window.navigator.mediaDevices; final jsStream = await mediaDevices.getUserMedia(mediaConstraints); - return MediaStream(jsStream, 'local'); + return MediaStreamWeb(jsStream, 'local'); } catch (e) { throw 'Unable to getUserMedia: ${e.toString()}'; } } - static Future getDisplayMedia( + @override + Future getDisplayMedia( Map mediaConstraints) async { try { final mediaDevices = html.window.navigator.mediaDevices; @@ -37,19 +41,20 @@ class MediaDevices { final jsStream = await jsutil.promiseToFuture( jsutil.callMethod(mediaDevices, 'getDisplayMedia', [arg])); - return MediaStream(jsStream, 'local'); + return MediaStreamWeb(jsStream, 'local'); } else { final jsStream = await html.window.navigator.getUserMedia( video: {'mediaSource': 'screen'}, audio: mediaConstraints['audio'] ?? 
false); - return MediaStream(jsStream, 'local'); + return MediaStreamWeb(jsStream, 'local'); } } catch (e) { throw 'Unable to getDisplayMedia: ${e.toString()}'; } } - static Future> getSources() async { + @override + Future> getSources() async { final devices = await html.window.navigator.mediaDevices.enumerateDevices(); final result = []; for (final device in devices) { diff --git a/lib/src/web/navigator_impl.dart b/lib/src/web/navigator_impl.dart new file mode 100644 index 0000000000..e407a09ab0 --- /dev/null +++ b/lib/src/web/navigator_impl.dart @@ -0,0 +1,24 @@ +import '../interface/media_stream.dart'; +import '../interface/mediadevices.dart'; +import '../interface/navigator.dart'; +import 'mediadevices_impl.dart'; + +class NavigatorWeb extends Navigator { + @override + Future getDisplayMedia(Map mediaConstraints) { + return mediaDevices.getDisplayMedia(mediaConstraints); + } + + @override + Future getSources() { + return mediaDevices.getSources(); + } + + @override + Future getUserMedia(Map mediaConstraints) { + return mediaDevices.getUserMedia(mediaConstraints); + } + + @override + MediaDevices get mediaDevices => MediaDevicesWeb(); +} diff --git a/lib/src/web/rtc_data_channel_impl.dart b/lib/src/web/rtc_data_channel_impl.dart new file mode 100644 index 0000000000..4abaef2963 --- /dev/null +++ b/lib/src/web/rtc_data_channel_impl.dart @@ -0,0 +1,76 @@ +import 'dart:async'; +import 'dart:html' as html; +import 'dart:js_util' as jsutil; + +import '../interface/enums.dart'; +import '../interface/rtc_data_channel.dart'; + +class RTCDataChannelWeb extends RTCDataChannel { + RTCDataChannelWeb(this._jsDc) { + stateChangeStream = _stateChangeController.stream; + messageStream = _messageController.stream; + _jsDc.onClose.listen((_) { + _state = RTCDataChannelState.RTCDataChannelClosed; + _stateChangeController.add(_state); + if (onDataChannelState != null) { + onDataChannelState(_state); + } + }); + _jsDc.onOpen.listen((_) { + _state = RTCDataChannelState.RTCDataChannelOpen; + _stateChangeController.add(_state); + if (onDataChannelState != null) { + onDataChannelState(_state); + } + }); + _jsDc.onMessage.listen((event) async { + var msg = await _parse(event.data); + _messageController.add(msg); + if (onMessage != null) { + onMessage(msg); + } + }); + } + + final html.RtcDataChannel _jsDc; + RTCDataChannelState _state = RTCDataChannelState.RTCDataChannelConnecting; + + @override + RTCDataChannelState get state => _state; + + final _stateChangeController = + StreamController.broadcast(sync: true); + final _messageController = + StreamController.broadcast(sync: true); + + Future _parse(dynamic data) async { + if (data is String) return RTCDataChannelMessage(data); + dynamic arrayBuffer; + if (data is html.Blob) { + // This should never happen actually + arrayBuffer = await jsutil + .promiseToFuture(jsutil.callMethod(data, 'arrayBuffer', [])); + } else { + arrayBuffer = data; + } + return RTCDataChannelMessage.fromBinary(arrayBuffer.asUint8List()); + } + + @override + Future send(RTCDataChannelMessage message) { + if (!message.isBinary) { + _jsDc.send(message.text); + } else { + // This may just work + _jsDc.sendByteBuffer(message.binary.buffer); + // If not, convert to ArrayBuffer/Blob + } + return Future.value(); + } + + @override + Future close() { + _jsDc.close(); + return Future.value(); + } +} diff --git a/lib/src/web/rtc_dtmf_sender_impl.dart b/lib/src/web/rtc_dtmf_sender_impl.dart new file mode 100644 index 0000000000..a3694c1030 --- /dev/null +++ b/lib/src/web/rtc_dtmf_sender_impl.dart @@ 
-0,0 +1,14 @@ +import 'dart:html' as html; + +import '../interface/rtc_dtmf_sender.dart'; + +class RTCDTMFSenderWeb extends RTCDTMFSender { + RTCDTMFSenderWeb(this._jsDtmfSender); + final html.RtcDtmfSender _jsDtmfSender; + + @override + Future insertDTMF(String tones, + {int duration = 100, int interToneGap = 70}) async { + return _jsDtmfSender.insertDtmf(tones, duration, interToneGap); + } +} diff --git a/lib/src/web/rtc_ice_candidate.dart b/lib/src/web/rtc_ice_candidate.dart deleted file mode 100644 index fa6e3e0ea5..0000000000 --- a/lib/src/web/rtc_ice_candidate.dart +++ /dev/null @@ -1,22 +0,0 @@ -import 'dart:html' as html; - -class RTCIceCandidate { - RTCIceCandidate(this.candidate, this.sdpMid, this.sdpMlineIndex); - RTCIceCandidate.fromJs(html.RtcIceCandidate jsIceCandidate) - : this(jsIceCandidate.candidate, jsIceCandidate.sdpMid, - jsIceCandidate.sdpMLineIndex); - - final String candidate; - final String sdpMid; - final int sdpMlineIndex; - - dynamic toMap() { - return { - 'candidate': candidate, - 'sdpMid': sdpMid, - 'sdpMLineIndex': sdpMlineIndex - }; - } - - html.RtcIceCandidate toJs() => html.RtcIceCandidate(toMap()); -} diff --git a/lib/src/web/rtc_peerconnection_factory.dart b/lib/src/web/rtc_peerconnection_factory.dart index f4463999fe..873a88abdc 100644 --- a/lib/src/web/rtc_peerconnection_factory.dart +++ b/lib/src/web/rtc_peerconnection_factory.dart @@ -1,27 +1,29 @@ import 'dart:async'; -import 'dart:convert'; -import 'dart:html' as html; -import 'media_stream.dart'; -import 'rtc_peerconnection.dart'; +import '../interface/media_recorder.dart'; +import '../interface/media_stream.dart'; +import '../interface/navigator.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'factory_impl.dart'; Future createPeerConnection( Map configuration, - Map constraints) async { - final constr = (constraints != null && constraints.isNotEmpty) - ? 
constraints - : { - 'mandatory': {}, - 'optional': [ - {'DtlsSrtpKeyAgreement': true}, - ], - }; - final jsRtcPc = html.RtcPeerConnection(configuration, constr); - final _peerConnectionId = base64Encode(jsRtcPc.toString().codeUnits); - return RTCPeerConnection(_peerConnectionId, jsRtcPc); + [Map constraints]) { + return RTCFactoryWeb.instance + .createPeerConnection(configuration, constraints); } -Future createLocalMediaStream(String label) async { - final jsMs = html.MediaStream(); - return MediaStream(jsMs, 'local'); +Future createLocalMediaStream(String label) { + return RTCFactoryWeb.instance.createLocalMediaStream(label); } + +MediaRecorder mediaRecorder() { + return RTCFactoryWeb.instance.mediaRecorder(); +} + +VideoRenderer videoRenderer() { + return RTCFactoryWeb.instance.videoRenderer(); +} + +Navigator get navigator => RTCFactoryWeb.instance.navigator; diff --git a/lib/src/web/rtc_peerconnection.dart b/lib/src/web/rtc_peerconnection_impl.dart similarity index 58% rename from lib/src/web/rtc_peerconnection.dart rename to lib/src/web/rtc_peerconnection_impl.dart index c05491235e..03c65eacdc 100644 --- a/lib/src/web/rtc_peerconnection.dart +++ b/lib/src/web/rtc_peerconnection_impl.dart @@ -3,47 +3,42 @@ import 'dart:html' as html; import 'dart:js' as js; import 'dart:js_util' as jsutil; -import '../enums.dart'; -import '../rtc_stats_report.dart'; -import 'media_stream.dart'; -import 'media_stream_track.dart'; -import 'rtc_data_channel.dart'; -import 'rtc_dtmf_sender.dart'; -import 'rtc_ice_candidate.dart'; -import 'rtc_session_description.dart'; - -/* - * Delegate for PeerConnection. - */ -typedef SignalingStateCallback = void Function(RTCSignalingState state); -typedef IceGatheringStateCallback = void Function(RTCIceGatheringState state); -typedef IceConnectionStateCallback = void Function(RTCIceConnectionState state); -typedef IceCandidateCallback = void Function(RTCIceCandidate candidate); -typedef AddStreamCallback = void Function(MediaStream stream); -typedef RemoveStreamCallback = void Function(MediaStream stream); -typedef AddTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RemoveTrackCallback = void Function( - MediaStream stream, MediaStreamTrack track); -typedef RTCDataChannelCallback = void Function(RTCDataChannel channel); -typedef RenegotiationNeededCallback = void Function(); +import 'package:flutter_webrtc/src/interface/rtc_rtp_transceiver.dart'; + +import 'package:flutter_webrtc/src/interface/rtc_rtp_sender.dart'; + +import 'package:flutter_webrtc/src/interface/rtc_rtp_receiver.dart'; + +import '../interface/enums.dart'; +import '../interface/media_stream.dart'; +import '../interface/media_stream_track.dart'; +import '../interface/rtc_data_channel.dart'; +import '../interface/rtc_dtmf_sender.dart'; +import '../interface/rtc_ice_candidate.dart'; +import '../interface/rtc_peerconnection.dart'; +import '../interface/rtc_session_description.dart'; +import '../interface/rtc_stats_report.dart'; +import 'media_stream_impl.dart'; +import 'media_stream_track_impl.dart'; +import 'rtc_data_channel_impl.dart'; +import 'rtc_dtmf_sender_impl.dart'; /* * PeerConnection */ -class RTCPeerConnection { - RTCPeerConnection(this._peerConnectionId, this._jsPc) { +class RTCPeerConnectionWeb extends RTCPeerConnection { + RTCPeerConnectionWeb(this._peerConnectionId, this._jsPc) { _jsPc.onAddStream.listen((mediaStreamEvent) { final jsStream = mediaStreamEvent.stream; final _remoteStream = _remoteStreams.putIfAbsent( - jsStream.id, () => 
MediaStream(jsStream, _peerConnectionId)); + jsStream.id, () => MediaStreamWeb(jsStream, _peerConnectionId)); onAddStream?.call(_remoteStream); jsStream.onAddTrack.listen((mediaStreamTrackEvent) { final jsTrack = (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; - final track = MediaStreamTrack(jsTrack); + final track = MediaStreamTrackWeb(jsTrack); _remoteStream.addTrack(track, addToNative: false).then((_) { onAddTrack?.call(_remoteStream, track); }); @@ -52,7 +47,7 @@ class RTCPeerConnection { jsStream.onRemoveTrack.listen((mediaStreamTrackEvent) { final jsTrack = (mediaStreamTrackEvent as html.MediaStreamTrackEvent).track; - final track = MediaStreamTrack(jsTrack); + final track = MediaStreamTrackWeb(jsTrack); _remoteStream.removeTrack(track, removeFromNative: false).then((_) { onRemoveTrack?.call(_remoteStream, track); }); @@ -60,12 +55,12 @@ class RTCPeerConnection { }); _jsPc.onDataChannel.listen((dataChannelEvent) { - onDataChannel?.call(RTCDataChannel(dataChannelEvent.channel)); + onDataChannel?.call(RTCDataChannelWeb(dataChannelEvent.channel)); }); _jsPc.onIceCandidate.listen((iceEvent) { if (iceEvent.candidate != null) { - onIceCandidate?.call(RTCIceCandidate.fromJs(iceEvent.candidate)); + onIceCandidate?.call(_iceFromJs(iceEvent.candidate)); } }); @@ -113,31 +108,25 @@ class RTCPeerConnection { RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; - // public: delegate - SignalingStateCallback onSignalingState; - IceGatheringStateCallback onIceGatheringState; - IceConnectionStateCallback onIceConnectionState; - IceCandidateCallback onIceCandidate; - AddStreamCallback onAddStream; - RemoveStreamCallback onRemoveStream; - AddTrackCallback onAddTrack; - RemoveTrackCallback onRemoveTrack; - RTCDataChannelCallback onDataChannel; - RenegotiationNeededCallback onRenegotiationNeeded; - + @override RTCSignalingState get signalingState => _signalingState; + @override RTCIceGatheringState get iceGatheringState => _iceGatheringState; + @override RTCIceConnectionState get iceConnectionState => _iceConnectionState; + @override Future dispose() { _jsPc.close(); return Future.value(); } + @override Map get getConfiguration => _configuration; + @override Future setConfiguration(Map configuration) { _configuration.addAll(configuration); @@ -145,52 +134,64 @@ class RTCPeerConnection { return Future.value(); } + @override Future createOffer( Map constraints) async { final offer = await _jsPc.createOffer(constraints); - return RTCSessionDescription.fromJs(offer); + return _sessionFromJs(offer); } + @override Future createAnswer( Map constraints) async { final answer = await _jsPc.createAnswer(constraints); - return RTCSessionDescription.fromJs(answer); + return _sessionFromJs(answer); } + @override Future addStream(MediaStream stream) { - _localStreams.putIfAbsent(stream.jsStream.id, - () => MediaStream(stream.jsStream, _peerConnectionId)); - _jsPc.addStream(stream.jsStream); + var _native = stream as MediaStreamWeb; + _localStreams.putIfAbsent( + stream.id, () => MediaStreamWeb(_native.jsStream, _peerConnectionId)); + _jsPc.addStream(_native.jsStream); return Future.value(); } + @override Future removeStream(MediaStream stream) async { - _localStreams.remove(stream.jsStream.id); - _jsPc.removeStream(stream.jsStream); + var _native = stream as MediaStreamWeb; + _localStreams.remove(stream.id); + _jsPc.removeStream(_native.jsStream); return Future.value(); } + @override Future setLocalDescription(RTCSessionDescription description) async { await 
_jsPc.setLocalDescription(description.toMap()); } + @override Future setRemoteDescription(RTCSessionDescription description) async { await _jsPc.setRemoteDescription(description.toMap()); } + @override Future getLocalDescription() async { - return RTCSessionDescription.fromJs(_jsPc.localDescription); + return _sessionFromJs(_jsPc.localDescription); } + @override Future getRemoteDescription() async { - return RTCSessionDescription.fromJs(_jsPc.remoteDescription); + return _sessionFromJs(_jsPc.remoteDescription); } + @override Future addCandidate(RTCIceCandidate candidate) async { await jsutil.promiseToFuture( - jsutil.callMethod(_jsPc, 'addIceCandidate', [candidate.toJs()])); + jsutil.callMethod(_jsPc, 'addIceCandidate', [_iceToJs(candidate)])); } + @override Future> getStats([MediaStreamTrack track]) async { final stats = await _jsPc.getStats(); var report = []; @@ -201,16 +202,19 @@ class RTCPeerConnection { return report; } + @override List getLocalStreams() => _jsPc .getLocalStreams() .map((jsStream) => _localStreams[jsStream.id]) .toList(); + @override List getRemoteStreams() => _jsPc .getRemoteStreams() .map((jsStream) => _remoteStreams[jsStream.id]) .toList(); + @override Future createDataChannel( String label, RTCDataChannelInit dataChannelDict) { final map = dataChannelDict.toMap(); @@ -219,24 +223,95 @@ class RTCPeerConnection { } final jsDc = _jsPc.createDataChannel(label, map); - return Future.value(RTCDataChannel(jsDc)); + return Future.value(RTCDataChannelWeb(jsDc)); } - Future close() async { + @override + Future close() async { _jsPc.close(); return Future.value(); } + @override + RTCDTMFSender createDtmfSender(MediaStreamTrack track) { + var _native = track as MediaStreamTrackWeb; + var jsDtmfSender = _jsPc.createDtmfSender(_native.jsTrack); + return RTCDTMFSenderWeb(jsDtmfSender); + } + + // + // utility section + // + + RTCIceCandidate _iceFromJs(html.RtcIceCandidate candidate) => RTCIceCandidate( + candidate.candidate, + candidate.sdpMid, + candidate.sdpMLineIndex, + ); + + html.RtcIceCandidate _iceToJs(RTCIceCandidate c) => + html.RtcIceCandidate(c.toMap()); + + RTCSessionDescription _sessionFromJs(html.RtcSessionDescription sd) => + RTCSessionDescription(sd.sdp, sd.type); + + /* //'audio|video', { 'direction': 'recvonly|sendonly|sendrecv' } + @override void addTransceiver(String type, Map options) { if (jsutil.hasProperty(_jsPc, 'addTransceiver')) { final jsOptions = js.JsObject.jsify(options); jsutil.callMethod(_jsPc, 'addTransceiver', [type, jsOptions]); } } + */ + @override + Future addTrack(MediaStreamTrack track, + [List streams]) { + // TODO: implement addTrack + throw UnimplementedError(); + } - RTCDTMFSender createDtmfSender(MediaStreamTrack track) { - var jsDtmfSender = _jsPc.createDtmfSender(track.jsTrack); - return RTCDTMFSender(jsDtmfSender); + @override + Future closeSender(RTCRtpSender sender) { + // TODO: implement closeSender + throw UnimplementedError(); + } + + @override + // TODO: implement connectionState + RTCPeerConnectionState get connectionState => throw UnimplementedError(); + + @override + Future createSender(String kind, String streamId) { + // TODO: implement createSender + throw UnimplementedError(); + } + + @override + // TODO: implement receivers + List get receivers => throw UnimplementedError(); + + @override + Future removeTrack(RTCRtpSender sender) { + // TODO: implement removeTrack + throw UnimplementedError(); + } + + @override + // TODO: implement senders + List get senders => throw UnimplementedError(); + + @override + // TODO: 
implement transceivers + List get transceivers => throw UnimplementedError(); + + @override + Future addTransceiver( + {MediaStreamTrack track, + RTCRtpMediaType kind, + RTCRtpTransceiverInit init}) { + // TODO: implement addTransceiver + throw UnimplementedError(); } } diff --git a/lib/src/web/rtc_session_description.dart b/lib/src/web/rtc_session_description.dart deleted file mode 100644 index 91c30d9f03..0000000000 --- a/lib/src/web/rtc_session_description.dart +++ /dev/null @@ -1,16 +0,0 @@ -import 'dart:html' as html; -import 'dart:js' as js; - -class RTCSessionDescription { - RTCSessionDescription(this.sdp, this.type); - RTCSessionDescription.fromJs(html.RtcSessionDescription rsd) - : this(rsd.sdp, rsd.type); - RTCSessionDescription.fromJsObj(js.JsObject js) : this(js['sdp'], js['type']); - - String sdp; - String type; - - Map toMap() { - return {'sdp': sdp, 'type': type}; - } -} diff --git a/lib/src/web/rtc_video_view.dart b/lib/src/web/rtc_video_renderer_impl.dart similarity index 60% rename from lib/src/web/rtc_video_view.dart rename to lib/src/web/rtc_video_renderer_impl.dart index 34e5b8b439..4babd58f95 100644 --- a/lib/src/web/rtc_video_view.dart +++ b/lib/src/web/rtc_video_renderer_impl.dart @@ -1,12 +1,12 @@ import 'dart:async'; import 'dart:html' as html; -import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; -import '../enums.dart'; -import './ui_fake.dart' if (dart.library.html) 'dart:ui' as ui; -import 'media_stream.dart'; +import '../interface/media_stream.dart'; +import '../interface/rtc_video_renderer.dart'; +import 'media_stream_impl.dart'; +import 'ui_fake.dart' if (dart.library.html) 'dart:ui' as ui; // An error code value to error name Map. // See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code @@ -31,65 +31,28 @@ const Map _kErrorValueToErrorDescription = { const String _kDefaultErrorMessage = 'No further diagnostic information can be determined or provided.'; -@immutable -class RTCVideoValue { - const RTCVideoValue({ - this.width = 0.0, - this.height = 0.0, - this.rotation = 0, - this.renderVideo = false, - }); - static const RTCVideoValue empty = RTCVideoValue(); - final double width; - final double height; - final int rotation; - final bool renderVideo; - double get aspectRatio { - if (width == 0.0 || height == 0.0) { - return 1.0; - } - return (rotation == 90 || rotation == 270) - ? height / width - : width / height; - } - - RTCVideoValue copyWith({ - double width, - double height, - int rotation, - bool renderVideo, - }) { - return RTCVideoValue( - width: width ?? this.width, - height: height ?? this.height, - rotation: rotation ?? this.rotation, - renderVideo: (this.width != 0 && this.height != 0 && renderVideo) ?? - this.renderVideo, - ); - } - - @override - String toString() => - '$runtimeType(width: $width, height: $height, rotation: $rotation)'; -} - -class RTCVideoRenderer extends ValueNotifier { - RTCVideoRenderer() - : textureId = _textureCounter++, - super(RTCVideoValue.empty); +class RTCVideoRendererWeb extends VideoRenderer { + RTCVideoRendererWeb() : _textureId = _textureCounter++; static int _textureCounter = 1; - final int textureId; + final int _textureId; html.VideoElement videoElement; MediaStream _srcObject; final _subscriptions = []; + @override + int get textureId => _textureId; + + @override bool get muted => videoElement?.muted ?? 
true; + @override set muted(bool mute) => videoElement?.muted = mute; + @override bool get renderVideo => videoElement != null && srcObject != null; + @override Future initialize() async { videoElement = html.VideoElement() //..src = 'https://flutter-webrtc-video-view-RTCVideoRenderer-$textureId' @@ -158,8 +121,10 @@ class RTCVideoRenderer extends ValueNotifier { renderVideo: renderVideo); } + @override MediaStream get srcObject => _srcObject; + @override set srcObject(MediaStream stream) { if (videoElement == null) throw 'Call initialize before setting the stream'; @@ -169,70 +134,20 @@ class RTCVideoRenderer extends ValueNotifier { return; } _srcObject = stream; - videoElement.srcObject = stream?.jsStream; + var _native = stream as MediaStreamWeb; + videoElement.srcObject = _native.jsStream; videoElement.muted = stream?.ownerTag == 'local' ?? false; value = value.copyWith(renderVideo: renderVideo); } @override Future dispose() async { - super.dispose(); await _srcObject?.dispose(); _srcObject = null; _subscriptions.forEach((s) => s.cancel()); videoElement.removeAttribute('src'); videoElement.load(); - } -} - -class RTCVideoView extends StatefulWidget { - RTCVideoView( - this._renderer, { - Key key, - this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, - this.mirror = false, - }) : assert(objectFit != null), - assert(mirror != null), - super(key: key); - - final RTCVideoRenderer _renderer; - final RTCVideoViewObjectFit objectFit; - final bool mirror; - @override - _RTCVideoViewState createState() => _RTCVideoViewState(); -} -class _RTCVideoViewState extends State { - _RTCVideoViewState(); - - @override - void initState() { - super.initState(); - widget._renderer?.addListener(() => setState(() {})); - } - - Widget buildVideoElementView(RTCVideoViewObjectFit objFit, bool mirror) { - // TODO(cloudwebrtc): Add css style for mirror. - widget._renderer.videoElement.style.objectFit = - objFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain - ? 'contain' - : 'cover'; - return HtmlElementView( - viewType: 'RTCVideoRenderer-${widget._renderer.textureId}'); - } - - @override - Widget build(BuildContext context) { - return LayoutBuilder( - builder: (BuildContext context, BoxConstraints constraints) { - return Center( - child: Container( - width: constraints.maxWidth, - height: constraints.maxHeight, - child: widget._renderer.renderVideo - ? 
buildVideoElementView(widget.objectFit, widget.mirror) - : Container(), - )); - }); + return super.dispose(); } } diff --git a/lib/src/web/rtc_video_view_impl.dart b/lib/src/web/rtc_video_view_impl.dart new file mode 100644 index 0000000000..ccacb5bc84 --- /dev/null +++ b/lib/src/web/rtc_video_view_impl.dart @@ -0,0 +1,58 @@ +import 'package:flutter/material.dart'; + +import '../interface/enums.dart'; +import '../rtc_video_renderer.dart'; +import '../web/rtc_video_renderer_impl.dart'; + +class RTCVideoView extends StatefulWidget { + RTCVideoView( + this._renderer, { + Key key, + this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, + this.mirror = false, + }) : assert(objectFit != null), + assert(mirror != null), + super(key: key); + + final RTCVideoRenderer _renderer; + final RTCVideoViewObjectFit objectFit; + final bool mirror; + @override + _RTCVideoViewState createState() => _RTCVideoViewState(); +} + +class _RTCVideoViewState extends State { + _RTCVideoViewState(); + RTCVideoRendererWeb get videoRenderer => + widget._renderer.delegate as RTCVideoRendererWeb; + @override + void initState() { + super.initState(); + widget._renderer?.delegate?.addListener(() => setState(() {})); + } + + Widget buildVideoElementView(RTCVideoViewObjectFit objFit, bool mirror) { + // TODO(cloudwebrtc): Add css style for mirror. + videoRenderer.videoElement.style.objectFit = + objFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? 'contain' + : 'cover'; + return HtmlElementView( + viewType: 'RTCVideoRenderer-${videoRenderer.textureId}'); + } + + @override + Widget build(BuildContext context) { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return Center( + child: Container( + width: constraints.maxWidth, + height: constraints.maxHeight, + child: widget._renderer.renderVideo + ? buildVideoElementView(widget.objectFit, widget.mirror) + : Container(), + )); + }); + } +} diff --git a/lib/src/web/utils.dart b/lib/src/web/utils.dart index fa2a78a496..3c78d50d78 100644 --- a/lib/src/web/utils.dart +++ b/lib/src/web/utils.dart @@ -1,7 +1,11 @@ +import 'package:flutter/services.dart'; + class WebRTC { static bool get platformIsDesktop => false; static bool get platformIsMobile => false; static bool get platformIsWeb => true; + + static MethodChannel methodChannel() => throw UnimplementedError; } From 3e07885801031fb80450c6243b354efe0ce78a3b Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 16:20:43 +0800 Subject: [PATCH 22/26] Update. 
--- README.md | 2 +- lib/src/web/rtc_peerconnection_impl.dart | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 09bb76ab04..2a581d7b3f 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ WebRTC plugin for Flutter Mobile/Desktop/Web | Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Screen Capture | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | | | -| Unified-Plan | | | | | | | | +| Unified-Plan | | :heavy_check_mark: | :heavy_check_mark: | | | | | | MediaRecorder| :warning: | :warning: | :heavy_check_mark: | | | | | ## Usage diff --git a/lib/src/web/rtc_peerconnection_impl.dart b/lib/src/web/rtc_peerconnection_impl.dart index 5906855afe..ed3c12c38c 100644 --- a/lib/src/web/rtc_peerconnection_impl.dart +++ b/lib/src/web/rtc_peerconnection_impl.dart @@ -83,6 +83,12 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { onSignalingState?.call(_signalingState); }); + js.JsObject.fromBrowserObject(_jsPc)['connectionstatechange'] = + js.JsFunction.withThis((_, state) { + _connectionState = peerConnectionStateForString(state); + onConnectionState.call(_connectionState); + }); + js.JsObject.fromBrowserObject(_jsPc)['negotiationneeded'] = js.JsFunction.withThis(() { onRenegotiationNeeded?.call(); @@ -104,6 +110,7 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { RTCSignalingState _signalingState; RTCIceGatheringState _iceGatheringState; RTCIceConnectionState _iceConnectionState; + RTCPeerConnectionState _connectionState; @override RTCSignalingState get signalingState => _signalingState; @@ -114,6 +121,9 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { @override RTCIceConnectionState get iceConnectionState => _iceConnectionState; + @override + RTCPeerConnectionState get connectionState => _connectionState; + @override Future dispose() { _jsPc.close(); @@ -265,6 +275,9 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { @override Future addTrack(MediaStreamTrack track, [List streams]) { + var _track = track as MediaStreamTrackWeb; + var _stream = streams[0] as MediaStreamWeb; + _jsPc.addTrack(_track.jsTrack, _stream.jsStream); // TODO: implement addTrack throw UnimplementedError(); } @@ -275,10 +288,6 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { throw UnimplementedError(); } - @override - // TODO: implement connectionState - RTCPeerConnectionState get connectionState => throw UnimplementedError(); - @override Future createSender(String kind, String streamId) { // TODO: implement createSender From c75a6218097d7bafbc32ec517e3b042e5c54ae50 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Tue, 13 Oct 2020 16:21:48 +0800 Subject: [PATCH 23/26] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2a581d7b3f..f3bf688ee6 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ WebRTC plugin for Flutter Mobile/Desktop/Web | Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Screen Capture | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | | | -| Unified-Plan | | :heavy_check_mark: | :heavy_check_mark: | | | | | +| Unified-Plan | 
:heavy_check_mark: | :heavy_check_mark: | | | | | | | MediaRecorder| :warning: | :warning: | :heavy_check_mark: | | | | | ## Usage From 07f6b8f7d9731c9ebb71218e3d53a84675085942 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 16:26:15 +0800 Subject: [PATCH 24/26] Update. --- lib/src/web/factory_impl.dart | 3 --- 1 file changed, 3 deletions(-) diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart index 12fbf07927..3e0269f006 100644 --- a/lib/src/web/factory_impl.dart +++ b/lib/src/web/factory_impl.dart @@ -2,9 +2,6 @@ import 'dart:async'; import 'dart:convert'; import 'dart:html' as html; -import 'package:flutter_webrtc/src/interface/rtc_dtmf_sender.dart'; -import 'package:flutter_webrtc/src/web/rtc_dtmf_sender_impl.dart'; - import '../interface/factory.dart'; import '../interface/media_recorder.dart'; import '../interface/media_stream.dart'; From 09fd6a027921351735579f51aea51ef1b32f47ec Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Oct 2020 21:30:40 +0800 Subject: [PATCH 25/26] Use shared code in iOS and macOS. --- README.md | 2 +- .../darwin/Classes/FlutterRPScreenRecorder.h | 10 + .../darwin/Classes/FlutterRPScreenRecorder.m | 74 + common/darwin/Classes/FlutterRTCDataChannel.h | 28 + common/darwin/Classes/FlutterRTCDataChannel.m | 165 ++ .../darwin/Classes/FlutterRTCFrameCapturer.h | 12 + .../darwin/Classes/FlutterRTCFrameCapturer.m | 89 ++ common/darwin/Classes/FlutterRTCMediaStream.h | 29 + common/darwin/Classes/FlutterRTCMediaStream.m | 617 +++++++ .../darwin/Classes/FlutterRTCPeerConnection.h | 43 + .../darwin/Classes/FlutterRTCPeerConnection.m | 558 +++++++ .../darwin/Classes/FlutterRTCVideoRenderer.h | 33 + .../darwin/Classes/FlutterRTCVideoRenderer.m | 268 ++++ common/darwin/Classes/FlutterWebRTCPlugin.h | 34 + common/darwin/Classes/FlutterWebRTCPlugin.m | 1415 +++++++++++++++++ ios/Classes/FlutterRPScreenRecorder.h | 10 +- ios/Classes/FlutterRPScreenRecorder.m | 73 +- ios/Classes/FlutterRTCDataChannel.h | 29 +- ios/Classes/FlutterRTCDataChannel.m | 166 +- ios/Classes/FlutterRTCFrameCapturer.h | 9 +- ios/Classes/FlutterRTCFrameCapturer.m | 83 +- ios/Classes/FlutterRTCMediaStream.h | 30 +- ios/Classes/FlutterRTCMediaStream.m | 614 +------ ios/Classes/FlutterRTCPeerConnection.h | 44 +- ios/Classes/FlutterRTCPeerConnection.m | 559 +------ ios/Classes/FlutterRTCVideoRenderer.h | 34 +- ios/Classes/FlutterRTCVideoRenderer.m | 269 +--- ios/Classes/FlutterWebRTCPlugin.h | 34 +- ios/Classes/FlutterWebRTCPlugin.m | 1388 +--------------- macos/Classes/FlutterRPScreenRecorder.h | 1 + macos/Classes/FlutterRPScreenRecorder.m | 1 + macos/Classes/FlutterRTCDataChannel.h | 29 +- macos/Classes/FlutterRTCDataChannel.m | 166 +- macos/Classes/FlutterRTCFrameCapturer.h | 9 +- macos/Classes/FlutterRTCFrameCapturer.m | 82 +- macos/Classes/FlutterRTCMediaStream.h | 21 +- macos/Classes/FlutterRTCMediaStream.m | 558 +------ macos/Classes/FlutterRTCPeerConnection.h | 44 +- macos/Classes/FlutterRTCPeerConnection.m | 505 +----- macos/Classes/FlutterRTCVideoRenderer.h | 29 +- macos/Classes/FlutterRTCVideoRenderer.m | 295 +--- macos/Classes/FlutterWebRTCPlugin.h | 24 +- macos/Classes/FlutterWebRTCPlugin.m | 722 +-------- macos/Classes/FlutterWebRTCPlugin.swift | 22 - 44 files changed, 3404 insertions(+), 5823 deletions(-) create mode 100644 common/darwin/Classes/FlutterRPScreenRecorder.h create mode 100644 common/darwin/Classes/FlutterRPScreenRecorder.m create mode 100755 common/darwin/Classes/FlutterRTCDataChannel.h create mode 100755 
common/darwin/Classes/FlutterRTCDataChannel.m create mode 100644 common/darwin/Classes/FlutterRTCFrameCapturer.h create mode 100644 common/darwin/Classes/FlutterRTCFrameCapturer.m create mode 100644 common/darwin/Classes/FlutterRTCMediaStream.h create mode 100755 common/darwin/Classes/FlutterRTCMediaStream.m create mode 100755 common/darwin/Classes/FlutterRTCPeerConnection.h create mode 100755 common/darwin/Classes/FlutterRTCPeerConnection.m create mode 100755 common/darwin/Classes/FlutterRTCVideoRenderer.h create mode 100755 common/darwin/Classes/FlutterRTCVideoRenderer.m create mode 100644 common/darwin/Classes/FlutterWebRTCPlugin.h create mode 100644 common/darwin/Classes/FlutterWebRTCPlugin.m mode change 100644 => 120000 ios/Classes/FlutterRPScreenRecorder.h mode change 100644 => 120000 ios/Classes/FlutterRPScreenRecorder.m mode change 100755 => 120000 ios/Classes/FlutterRTCDataChannel.h mode change 100755 => 120000 ios/Classes/FlutterRTCDataChannel.m mode change 100644 => 120000 ios/Classes/FlutterRTCFrameCapturer.h mode change 100644 => 120000 ios/Classes/FlutterRTCFrameCapturer.m mode change 100644 => 120000 ios/Classes/FlutterRTCMediaStream.h mode change 100755 => 120000 ios/Classes/FlutterRTCMediaStream.m mode change 100755 => 120000 ios/Classes/FlutterRTCPeerConnection.h mode change 100755 => 120000 ios/Classes/FlutterRTCPeerConnection.m mode change 100755 => 120000 ios/Classes/FlutterRTCVideoRenderer.h mode change 100755 => 120000 ios/Classes/FlutterRTCVideoRenderer.m mode change 100644 => 120000 ios/Classes/FlutterWebRTCPlugin.h mode change 100644 => 120000 ios/Classes/FlutterWebRTCPlugin.m create mode 120000 macos/Classes/FlutterRPScreenRecorder.h create mode 120000 macos/Classes/FlutterRPScreenRecorder.m mode change 100755 => 120000 macos/Classes/FlutterRTCDataChannel.h mode change 100755 => 120000 macos/Classes/FlutterRTCDataChannel.m mode change 100644 => 120000 macos/Classes/FlutterRTCFrameCapturer.h mode change 100644 => 120000 macos/Classes/FlutterRTCFrameCapturer.m mode change 100644 => 120000 macos/Classes/FlutterRTCMediaStream.h mode change 100755 => 120000 macos/Classes/FlutterRTCMediaStream.m mode change 100755 => 120000 macos/Classes/FlutterRTCPeerConnection.h mode change 100755 => 120000 macos/Classes/FlutterRTCPeerConnection.m mode change 100755 => 120000 macos/Classes/FlutterRTCVideoRenderer.h mode change 100755 => 120000 macos/Classes/FlutterRTCVideoRenderer.m mode change 100644 => 120000 macos/Classes/FlutterWebRTCPlugin.h mode change 100644 => 120000 macos/Classes/FlutterWebRTCPlugin.m delete mode 100644 macos/Classes/FlutterWebRTCPlugin.swift diff --git a/README.md b/README.md index f3bf688ee6..6be3b4f784 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ WebRTC plugin for Flutter Mobile/Desktop/Web | Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | [WIP] | | | Screen Capture | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | | | -| Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | | | | | | +| Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | | | | MediaRecorder| :warning: | :warning: | :heavy_check_mark: | | | | | ## Usage diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.h b/common/darwin/Classes/FlutterRPScreenRecorder.h new file mode 100644 index 0000000000..8b3bec13c1 --- /dev/null +++ 
b/common/darwin/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1,10 @@ +#import +#if TARGET_OS_IPHONE +@interface FlutterRPScreenRecorder : RTCVideoCapturer + +-(void)startCapture; + +-(void)stopCapture; + +@end +#endif diff --git a/common/darwin/Classes/FlutterRPScreenRecorder.m b/common/darwin/Classes/FlutterRPScreenRecorder.m new file mode 100644 index 0000000000..fb421b7b52 --- /dev/null +++ b/common/darwin/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1,74 @@ +#import "FlutterRPScreenRecorder.h" +#if TARGET_OS_IPHONE +#import + +//See: https://developer.apple.com/videos/play/wwdc2017/606/ + +@implementation FlutterRPScreenRecorder { + RPScreenRecorder *screenRecorder; + RTCVideoSource *source; +} + +- (instancetype)initWithDelegate:(__weak id)delegate { + source = delegate; + return [super initWithDelegate:delegate]; +} + +-(void)startCapture +{ + if(screenRecorder == NULL) + screenRecorder = [RPScreenRecorder sharedRecorder]; + + [screenRecorder setMicrophoneEnabled:NO]; + + if (![screenRecorder isAvailable]) { + NSLog(@"Screen recorder is not available!"); + return; + } + + [screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) { + if (bufferType == RPSampleBufferTypeVideo) {// We want video only now + [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; + } + } completionHandler:^(NSError * _Nullable error) { + if (error != nil) + NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); + }]; +} + +-(void)stopCapture +{ + [screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) { + if (error != nil) + NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); + }]; +} + +-(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType +{ + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + + [source adaptOutputFormatToWidth:width/2 height:height/2 fps:8]; + + RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end +#endif diff --git a/common/darwin/Classes/FlutterRTCDataChannel.h b/common/darwin/Classes/FlutterRTCDataChannel.h new file mode 100755 index 0000000000..c2e039f072 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.h @@ -0,0 +1,28 @@ +#import "FlutterWebRTCPlugin.h" +#import + +@interface RTCDataChannel (Flutter) +@property (nonatomic, strong) NSString *peerConnectionId; +@property (nonatomic, strong) NSNumber *flutterChannelId; +@property (nonatomic, strong) FlutterEventSink eventSink; +@property (nonatomic, strong) FlutterEventChannel* eventChannel; +@end + +@interface FlutterWebRTCPlugin (RTCDataChannel) + + +-(void)createDataChannel:(nonnull NSString *)peerConnectionId + label:(nonnull NSString *)label + config:(nonnull RTCDataChannelConfiguration *)config + messenger:(NSObject*)messenger; + 
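+// Note: createDataChannel: also registers a FlutterEventChannel named
+// "FlutterWebRTC/dataChannelEvent<peerConnectionId><channelId>" (see the
+// implementation in FlutterRTCDataChannel.m); data-channel state changes and
+// incoming messages are delivered back to the Dart side over that channel.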
+-(void)dataChannelClose:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId; + + +-(void)dataChannelSend:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId + data:(nonnull NSString *)data + type:(nonnull NSString *)type; + +@end diff --git a/common/darwin/Classes/FlutterRTCDataChannel.m b/common/darwin/Classes/FlutterRTCDataChannel.m new file mode 100755 index 0000000000..e408c53076 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCDataChannel.m @@ -0,0 +1,165 @@ +#import +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCPeerConnection.h" +#import + +@implementation RTCDataChannel (Flutter) + +- (NSString *)peerConnectionId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setPeerConnectionId:(NSString *)peerConnectionId +{ + objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink )eventSink +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink +{ + objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSNumber *)flutterChannelId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterChannelId:(NSNumber *)flutterChannelId +{ + objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel *)eventChannel +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel *)eventChannel +{ + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (RTCDataChannel) + +-(void)createDataChannel:(nonnull NSString *)peerConnectionId + label:(NSString *)label + config:(RTCDataChannelConfiguration *)config + messenger:(NSObject*)messenger +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; + + if (nil != dataChannel) { + dataChannel.peerConnectionId = peerConnectionId; + NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; + peerConnection.dataChannels[dataChannelId] = dataChannel; + dataChannel.flutterChannelId = dataChannelId; + dataChannel.delegate = self; + + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] + binaryMessenger:messenger]; + + dataChannel.eventChannel = eventChannel; + [eventChannel setStreamHandler:dataChannel]; + } +} + +-(void)dataChannelClose:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSMutableDictionary *dataChannels = peerConnection.dataChannels; + RTCDataChannel *dataChannel = dataChannels[dataChannelId]; + FlutterEventChannel *eventChannel = dataChannel.eventChannel; + [eventChannel setStreamHandler:nil]; + dataChannel.eventChannel = nil; 
+ [dataChannel close]; + [dataChannels removeObjectForKey:dataChannelId]; +} + +-(void)dataChannelSend:(nonnull NSString *)peerConnectionId + dataChannelId:(nonnull NSString *)dataChannelId + data:(id)data + type:(NSString *)type +{ + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; + + NSData *bytes = [type isEqualToString:@"binary"] ? + ((FlutterStandardTypedData*)data).data : + [data dataUsingEncoding:NSUTF8StringEncoding]; + + RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; + [dataChannel sendData:buffer]; +} + +- (NSString *)stringForDataChannelState:(RTCDataChannelState)state +{ + switch (state) { + case RTCDataChannelStateConnecting: return @"connecting"; + case RTCDataChannelStateOpen: return @"open"; + case RTCDataChannelStateClosing: return @"closing"; + case RTCDataChannelStateClosed: return @"closed"; + } + return nil; +} + +#pragma mark - RTCDataChannelDelegate methods + +// Called when the data channel state has changed. +- (void)dataChannelDidChangeState:(RTCDataChannel*)channel +{ + RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; + FlutterEventSink eventSink = channel.eventSink; + if(eventSink) { + eventSink(@{ @"event" : @"dataChannelStateChanged", + @"id": channel.flutterChannelId, + @"state": [self stringForDataChannelState:channel.readyState]}); + } +} + +// Called when a data buffer was successfully received. +- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer +{ + NSString *type; + id data; + if (buffer.isBinary) { + type = @"binary"; + data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; + } else { + type = @"text"; + data = [[NSString alloc] initWithData:buffer.data + encoding:NSUTF8StringEncoding]; + } + RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; + FlutterEventSink eventSink = channel.eventSink; + if(eventSink) { + eventSink(@{ @"event" : @"dataChannelReceiveMessage", + @"id": channel.flutterChannelId, + @"type": type, + @"data": (data ? 
data : [NSNull null])}); + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h new file mode 100644 index 0000000000..a3ae4f8ab0 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1,12 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif +#import + +@interface FlutterRTCFrameCapturer : NSObject + +- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m new file mode 100644 index 0000000000..d06444077b --- /dev/null +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1,89 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif + + +#import "FlutterRTCFrameCapturer.h" + +#include "libyuv.h" + +@import CoreImage; +@import CoreVideo; + +@implementation FlutterRTCFrameCapturer { + RTCVideoTrack* _track; + NSString* _path; + FlutterResult _result; + bool _gotFrame; +} + +- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result +{ + self = [super init]; + if (self) { + _gotFrame = false; + _track = track; + _path = path; + _result = result; + [track addRenderer:self]; + } + return self; +} + +- (void)setSize:(CGSize)size +{ +} + +- (void)renderFrame:(nullable RTCVideoFrame *)frame +{ +#if TARGET_OS_IPHONE + if (_gotFrame || frame == nil) return; + _gotFrame = true; + + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; + CIContext *context = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [context createCGImage:ciImage + fromRect:CGRectMake(0, 0, frame.width, frame.height)]; + + UIImageOrientation orientation; + switch (frame.rotation) { + case RTCVideoRotation_90: + orientation = UIImageOrientationRight; + break; + case RTCVideoRotation_180: + orientation = UIImageOrientationDown; + break; + case RTCVideoRotation_270: + orientation = UIImageOrientationLeft; + default: + orientation = UIImageOrientationUp; + break; + } + + UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; + CGImageRelease(cgImage); + NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); + + if ([jpgData writeToFile:_path atomically:NO]) { + NSLog(@"File writed successfully to %@", _path); + _result(nil); + } else { + NSLog(@"Failed to write to file"); + _result([FlutterError errorWithCode:@"CaptureFrameFailed" + message:@"Failed to write JPEG data to file" + details:nil]); + } + + dispatch_async(dispatch_get_main_queue(), ^{ + [self->_track removeRenderer:self]; + self->_track = nil; + }); +#endif +} + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h new file mode 100644 index 0000000000..12f1633cde --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.h @@ -0,0 +1,29 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (RTCMediaStream) + +-(void)getUserMedia:(NSDictionary *)constraints + result:(FlutterResult)result; + +-(void)getDisplayMedia:(NSDictionary *)constraints + result:(FlutterResult)result; + +-(void)createLocalMediaStream:(FlutterResult)result; + +-(void)getSources:(FlutterResult)result; + +-(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track + result:(FlutterResult) result; 
+ +-(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track + torch:(BOOL) torch + result:(FlutterResult) result; + +-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track + result:(FlutterResult) result; + +-(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track + toPath:(NSString *) path + result:(FlutterResult) result; +@end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m new file mode 100755 index 0000000000..c48ae37b25 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -0,0 +1,617 @@ +#import + +#import + +#import "FlutterRTCFrameCapturer.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCPeerConnection.h" + +#if TARGET_OS_IPHONE +#import "FlutterRPScreenRecorder.h" +#endif + +@implementation AVCaptureDevice (Flutter) + +- (NSString*)positionString { + switch (self.position) { + case AVCaptureDevicePositionUnspecified: return @"unspecified"; + case AVCaptureDevicePositionBack: return @"back"; + case AVCaptureDevicePositionFront: return @"front"; + } + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCMediaStream) + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} + */ +typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} + */ +typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); + +- (RTCMediaConstraints *)defaultMediaStreamConstraints { + NSDictionary *mandatoryConstraints + = @{ @"minWidth" : @"1280", + @"minHeight" : @"720", + @"minFrameRate" : @"30" }; + RTCMediaConstraints* constraints = + [[RTCMediaConstraints alloc] + initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; + return constraints; +} + +/** + * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the audio-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCAudioTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCAudioTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. + */ +- (void)getUserAudio:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + NSString *trackId = [[NSUUID UUID] UUIDString]; + RTCAudioTrack *audioTrack + = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; + + [mediaStream addAudioTrack:audioTrack]; + + successCallback(mediaStream); +} + +// TODO: Use RCTConvert for constraints ... 
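getUserMedia below receives its constraints map verbatim from the 'FlutterWebRTC.Method' channel: 'audio' may be a bool or a map, the 'video.mandatory' values are parsed as strings, and 'facingMode' or 'optional' entries carrying a 'sourceId' select the camera. An illustrative Dart-side call shaped to match that parsing (the wrapper function is hypothetical; the channel name, method name and keys are taken from this patch):

    import 'package:flutter/services.dart';

    const MethodChannel _channel = MethodChannel('FlutterWebRTC.Method');

    /// Illustrative getUserMedia call. The mandatory video values are read with
    /// intValue on the native side, so they are passed here as strings.
    Future<Map<dynamic, dynamic>> getUserMediaExample() async {
      final constraints = <String, dynamic>{
        'audio': true,
        'video': <String, dynamic>{
          'mandatory': {'minWidth': '1280', 'minHeight': '720', 'minFrameRate': '30'},
          'facingMode': 'user', // 'environment' selects the back camera
          'optional': [
            // {'sourceId': '<deviceId reported by getSources>'},
          ],
        },
      };
      // Resolves to a map with 'streamId', 'audioTracks' and 'videoTracks'.
      final reply = await _channel.invokeMethod('getUserMedia', {'constraints': constraints});
      return reply as Map<dynamic, dynamic>;
    }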
+-(void)getUserMedia:(NSDictionary *)constraints + result:(FlutterResult) result { + // Initialize RTCMediaStream with a unique label in order to allow multiple + // RTCMediaStream instances initialized by multiple getUserMedia calls to be + // added to 1 RTCPeerConnection instance. As suggested by + // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good + // practice, use a UUID (conforming to RFC4122). + NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream + = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + [self + getUserMedia:constraints + successCallback:^ (RTCMediaStream *mediaStream) { + NSString *mediaStreamId = mediaStream.streamId; + + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack *track in mediaStream.audioTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + for (RTCVideoTrack *track in mediaStream.videoTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); + } + errorCallback:^ (NSString *errorType, NSString *errorMessage) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] + message:errorMessage + details:nil]); + } + mediaStream:mediaStream]; +} + +/** + * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which + * satisfies specific constraints and adds it to a specific + * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track + * of the respective media type and the specified {@code constraints} specify + * that a track of the respective media type is required; otherwise, reports + * success for the specified {@code mediaStream} to a specific + * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media + * type-specific iteration of or successfully concludes the + * {@code getUserMedia()} algorithm. The method will be recursively invoked to + * conclude the whole {@code getUserMedia()} algorithm either with (successful) + * satisfaction of the specified {@code constraints} or with failure. + * + * @param constraints The {@code MediaStreamConstraints} which specifies the + * requested media types and which the new {@code RTCAudioTrack} or + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm. 
+ */ +- (void)getUserMedia:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + // If mediaStream contains no audioTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local audio content. + if (mediaStream.audioTracks.count == 0) { + // constraints.audio + id audioConstraints = constraints[@"audio"]; + BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; + if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { + [self requestAccessForMediaType:AVMediaTypeAudio + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } + } + + // If mediaStream contains no videoTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local video content. + if (mediaStream.videoTracks.count == 0) { + // constraints.video + id videoConstraints = constraints[@"video"]; + if (videoConstraints) { + BOOL requestAccessForVideo + = [videoConstraints isKindOfClass:[NSNumber class]] + ? [videoConstraints boolValue] + : [videoConstraints isKindOfClass:[NSDictionary class]]; +#if !TARGET_IPHONE_SIMULATOR + if (requestAccessForVideo) { + [self requestAccessForMediaType:AVMediaTypeVideo + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } +#endif + } + } + + // There are audioTracks and/or videoTracks in mediaStream as requested by + // constraints so the getUserMedia() is to conclude with success. + successCallback(mediaStream); +} + +/** + * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the video-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCVideoTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. 
+ */ +- (void)getUserVideo:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + id videoConstraints = constraints[@"video"]; + AVCaptureDevice *videoDevice; + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.optional + id optionalVideoConstraints = videoConstraints[@"optional"]; + if (optionalVideoConstraints + && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { + NSArray *options = optionalVideoConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; + if (sourceId) { + videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; + if (videoDevice) { + break; + } + } + } + } + } + if (!videoDevice) { + // constraints.video.facingMode + // + // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode + id facingMode = videoConstraints[@"facingMode"]; + if (facingMode && [facingMode isKindOfClass:[NSString class]]) { + AVCaptureDevicePosition position; + if ([facingMode isEqualToString:@"environment"]) { + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionBack; + } else if ([facingMode isEqualToString:@"user"]) { + self._usingFrontCamera = YES; + position = AVCaptureDevicePositionFront; + } else { + // If the specified facingMode value is not supported, fall back to + // the default video device. + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionUnspecified; + } + videoDevice = [self findDeviceForPosition:position]; + } + } + if (!videoDevice) { + videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + } + + //TODO(rostopira): refactor to separate function and add support for max + + self._targetWidth = 1280; + self._targetHeight = 720; + self._targetFps = 30; + + id mandatory = videoConstraints[@"mandatory"]; + // constraints.video.mandatory + if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) + { + id widthConstraint = mandatory[@"minWidth"]; + if ([widthConstraint isKindOfClass:[NSString class]]) { + int possibleWidth = [widthConstraint intValue]; + if (possibleWidth != 0) { + self._targetWidth = possibleWidth; + } + } + id heightConstraint = mandatory[@"minHeight"]; + if ([heightConstraint isKindOfClass:[NSString class]]) { + int possibleHeight = [heightConstraint intValue]; + if (possibleHeight != 0) { + self._targetHeight = possibleHeight; + } + } + id fpsConstraint = mandatory[@"minFrameRate"]; + if ([fpsConstraint isKindOfClass:[NSString class]]) { + int possibleFps = [fpsConstraint intValue]; + if (possibleFps != 0) { + self._targetFps = possibleFps; + } + } + } + + if (videoDevice) { + RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; + if (self.videoCapturer) { + [self.videoCapturer stopCapture]; + } + self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; + AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; + NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; + [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { + if (error) { + NSLog(@"Start capture error: %@", [error localizedDescription]); + } + }]; + + NSString *trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + successCallback(mediaStream); + } else { + // According to step 6.2.3 of the getUserMedia() algorithm, if there is no + // source, fail with a new OverconstrainedError. + errorCallback(@"OverconstrainedError", /* errorMessage */ nil); + } +} + +-(void)mediaStreamRelease:(RTCMediaStream *)stream +{ + if (stream) { + for (RTCVideoTrack *track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack *track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:stream.streamId]; + } +} + + +/** + * Obtains local media content of a specific type. Requests access for the + * specified {@code mediaType} if necessary. In other words, implements a media + * type-specific iteration of the {@code getUserMedia()} algorithm. + * + * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} + * which specifies the type of the local media content to obtain. + * @param constraints The {@code MediaStreamConstraints} which are to be + * satisfied by the obtained local media content. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is to collect the + * obtained local media content of the specified {@code mediaType}. + */ +- (void)requestAccessForMediaType:(NSString *)mediaType + constraints:(NSDictionary *)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream *)mediaStream { + // According to step 6.2.1 of the getUserMedia() algorithm, if there is no + // source, fail "with a new DOMException object whose name attribute has the + // value NotFoundError." + // XXX The following approach does not work for audio in Simulator. That is + // because audio capture is done using AVAudioSession which does not use + // AVCaptureDevice there. Anyway, Simulator will not (visually) request access + // for audio. + if (mediaType == AVMediaTypeVideo + && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { + // Since successCallback and errorCallback are asynchronously invoked + // elsewhere, make sure that the invocation here is consistent. 
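Both this NotFoundError and the NotAllowedError issued further down reach Dart as a PlatformException whose code is the error type prefixed with 'Error ' (see the errorCallback wiring in getUserMedia above), so callers tell them apart by message. An illustrative handler; the wrapper is hypothetical, while the code and message strings come from this patch:

    import 'package:flutter/services.dart';

    /// Illustrative only: distinguishing the DOMException-style rejections
    /// produced by requestAccessForMediaType on the Dart side.
    Future<Map<dynamic, dynamic>?> tryGetUserMedia(
        MethodChannel channel, Map<String, dynamic> constraints) async {
      try {
        return await channel.invokeMethod('getUserMedia', {'constraints': constraints});
      } on PlatformException catch (e) {
        if (e.code == 'Error DOMException' && e.message == 'NotFoundError') {
          print('No capture device of the requested kind was found.');
        } else if (e.code == 'Error DOMException' && e.message == 'NotAllowedError') {
          print('The user denied the permission prompt.');
        } else {
          rethrow;
        }
        return null;
      }
    }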
+ dispatch_async(dispatch_get_main_queue(), ^ { + errorCallback(@"DOMException", @"NotFoundError"); + }); + return; + } + + [AVCaptureDevice + requestAccessForMediaType:mediaType + completionHandler:^ (BOOL granted) { + dispatch_async(dispatch_get_main_queue(), ^ { + if (granted) { + NavigatorUserMediaSuccessCallback scb + = ^ (RTCMediaStream *mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } else { + // According to step 10 Permission Failure of the getUserMedia() + // algorithm, if the user has denied permission, fail "with a new + // DOMException object whose name attribute has the value + // NotAllowedError." + errorCallback(@"DOMException", @"NotAllowedError"); + } + }); + }]; +} + +#if TARGET_OS_IPHONE +-(void)getDisplayMedia:(NSDictionary *)constraints + result:(FlutterResult)result { + NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; + FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; + + [screenCapturer startCapture]; + + //TODO: + self.videoCapturer = screenCapturer; + + NSString *trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCVideoTrack *track in mediaStream.videoTracks) { + [self.localTracks setObject:track forKey:track.trackId]; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); +} +#endif +-(void)createLocalMediaStream:(FlutterResult)result{ + NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId": [mediaStream streamId] }); +} + +-(void)getSources:(FlutterResult)result{ + NSMutableArray *sources = [NSMutableArray array]; + NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in videoDevices) { + [sources addObject:@{ + @"facing": device.positionString, + @"deviceId": device.uniqueID, + @"label": device.localizedName, + @"kind": @"videoinput", + }]; + } + NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; + for (AVCaptureDevice *device in audioDevices) { + [sources addObject:@{ + @"facing": @"", + @"deviceId": device.uniqueID, + @"label": device.localizedName, + @"kind": @"audioinput", + }]; + } + result(@{@"sources": sources}); +} + +-(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track +{ + // 
what's different to mediaStreamTrackStop? only call mediaStream explicitly? + if (mediaStream && track) { + track.isEnabled = NO; + // FIXME this is called when track is removed from the MediaStream, + // but it doesn't mean it can not be added back using MediaStream.addTrack + //TODO: [self.localTracks removeObjectForKey:trackID]; + if ([track.kind isEqualToString:@"audio"]) { + [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; + } else if([track.kind isEqualToString:@"video"]) { + [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; + } + } +} + +-(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled +{ + if (track && track.isEnabled != enabled) { + track.isEnabled = enabled; + } +} + +-(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track result:(FlutterResult) result +{ + if (!self.videoCapturer) { + result(@NO); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + result(@NO); + return; + } + + AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice *device = deviceInput.device; + + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +} + +-(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track torch:(BOOL)torch result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set torch"); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set torch"); + return; + } + + AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice *device = deviceInput.device; + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + return; + } + + NSError *error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; + AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; + [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ + if (error != nil) { + result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); + } else { + result([NSNumber numberWithBool:self._usingFrontCamera]); + } + }]; +} + +-(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result +{ + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. 
Can't capture frame."); + return; + } + + FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; +} + +-(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track +{ + if (track) { + track.isEnabled = NO; + [self.localTracks removeObjectForKey:track.trackId]; + } +} + +- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice *device in captureDevices) { + if (device.position == position) { + return device; + } + } + return captureDevices[0]; +} + +- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { + NSArray *formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat *selectedFormat = nil; + int currentDiff = INT_MAX; + for (AVCaptureDeviceFormat *format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, self._targetFps); +} + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.h b/common/darwin/Classes/FlutterRTCPeerConnection.h new file mode 100755 index 0000000000..b99f885b0a --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1,43 @@ +#import "FlutterWebRTCPlugin.h" + +@interface RTCPeerConnection (Flutter) +@property (nonatomic, strong) NSMutableDictionary *dataChannels; +@property (nonatomic, strong) NSMutableDictionary *remoteStreams; +@property (nonatomic, strong) NSMutableDictionary *remoteTracks; +@property (nonatomic, strong) NSString *flutterId; +@property (nonatomic, strong) FlutterEventSink eventSink; +@property (nonatomic, strong) FlutterEventChannel* eventChannel; +@end + +@interface FlutterWebRTCPlugin (RTCPeerConnection) + +-(void) peerConnectionCreateOffer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result; + +-(void) peerConnectionGetStats:(nonnull NSString *)trackID + peerConnection:(nonnull RTCPeerConnection 
*)peerConnection + result:(nonnull FlutterResult)result; + +-(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; + +-(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection; + +@end diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.m b/common/darwin/Classes/FlutterRTCPeerConnection.m new file mode 100755 index 0000000000..a265d8e8a5 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1,558 @@ +#import +#import "FlutterWebRTCPlugin.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCDataChannel.h" + +#import + +@implementation RTCPeerConnection (Flutter) + +@dynamic eventSink; + +- (NSString *)flutterId +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterId:(NSString *)flutterId +{ + objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink +{ + objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel *)eventChannel +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel *)eventChannel +{ + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)dataChannels +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setDataChannels:(NSMutableDictionary *)dataChannels +{ + objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)remoteStreams +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams +{ + objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary *)remoteTracks +{ + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks +{ + objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCPeerConnection) + +-(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection +{ + [peerConnection setConfiguration:configuration]; +} + +-(void) peerConnectionCreateOffer:(NSDictionary *)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result +{ + [peerConnection + offerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription *sdp, NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"CreateOfferFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } + }]; +} + +-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints + 
peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection + answerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription *sdp, NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"CreateAnswerFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } + }]; +} + +-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { + if (error) { + result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection *)peerConnection + result:(FlutterResult)result +{ + [peerConnection addIceCandidate:candidate]; + result(nil); + //NSLog(@"addICECandidateresult: %@", candidate); +} + +-(void) peerConnectionClose:(RTCPeerConnection *)peerConnection +{ + [peerConnection close]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary *dataChannels + = peerConnection.dataChannels; + for (NSString *dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. 
+ } + [dataChannels removeAllObjects]; +} + +-(void) peerConnectionGetStats:(nonnull NSString *)trackID + peerConnection:(nonnull RTCPeerConnection *)peerConnection + result:(nonnull FlutterResult)result +{ + RTCMediaStreamTrack *track = nil; + if (!trackID + || !trackID.length + || (track = self.localTracks[trackID]) + || (track = peerConnection.remoteTracks[trackID])) { + [peerConnection statsForTrack:track + statsOutputLevel:RTCStatsOutputLevelStandard + completionHandler:^(NSArray *reports) { + + NSMutableArray *stats = [NSMutableArray array]; + + for (RTCLegacyStatsReport *report in reports) { + [stats addObject:@{@"id": report.reportId, + @"type": report.type, + @"timestamp": @(report.timestamp), + @"values": report.values + }]; + } + + result(@{@"stats": stats}); + }]; + }else{ + result([FlutterError errorWithCode:@"GetStatsFailed" + message:[NSString stringWithFormat:@"Error %@", @""] + details:nil]); + } +} + +- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { + switch (state) { + case RTCIceConnectionStateNew: return @"new"; + case RTCIceConnectionStateChecking: return @"checking"; + case RTCIceConnectionStateConnected: return @"connected"; + case RTCIceConnectionStateCompleted: return @"completed"; + case RTCIceConnectionStateFailed: return @"failed"; + case RTCIceConnectionStateDisconnected: return @"disconnected"; + case RTCIceConnectionStateClosed: return @"closed"; + case RTCIceConnectionStateCount: return @"count"; + } + return nil; +} + +- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { + switch (state) { + case RTCIceGatheringStateNew: return @"new"; + case RTCIceGatheringStateGathering: return @"gathering"; + case RTCIceGatheringStateComplete: return @"complete"; + } + return nil; +} + +- (NSString *)stringForSignalingState:(RTCSignalingState)state { + switch (state) { + case RTCSignalingStateStable: return @"stable"; + case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; + case RTCSignalingStateClosed: return @"closed"; + } + return nil; +} + + +/** + * Parses the constraint keys and values of a specific JavaScript object into + * a specific NSMutableDictionary in a format suitable for the + * initialization of a RTCMediaConstraints instance. + * + * @param src The JavaScript object which defines constraint keys and values and + * which is to be parsed into the specified dst. + * @param dst The NSMutableDictionary into which the constraint keys + * and values defined by src are to be written in a format suitable for + * the initialization of a RTCMediaConstraints instance. + */ +- (void)parseJavaScriptConstraints:(NSDictionary *)src + intoWebRTCConstraints:(NSMutableDictionary *)dst { + for (id srcKey in src) { + id srcValue = src[srcKey]; + NSString *dstValue; + + if ([srcValue isKindOfClass:[NSNumber class]]) { + dstValue = [srcValue boolValue] ? @"true" : @"false"; + } else { + dstValue = [srcValue description]; + } + dst[[srcKey description]] = dstValue; + } +} + +/** + * Parses a JavaScript object into a new RTCMediaConstraints instance. + * + * @param constraints The JavaScript object to parse into a new + * RTCMediaConstraints instance. + * @returns A new RTCMediaConstraints instance initialized with the + * mandatory and optional constraint keys and values specified by + * constraints. 
+ */ +- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { + id mandatory = constraints[@"mandatory"]; + NSMutableDictionary *mandatory_ + = [NSMutableDictionary new]; + + if ([mandatory isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary *)mandatory + intoWebRTCConstraints:mandatory_]; + } + + id optional = constraints[@"optional"]; + NSMutableDictionary *optional_ + = [NSMutableDictionary new]; + + if ([optional isKindOfClass:[NSArray class]]) { + for (id o in (NSArray *)optional) { + if ([o isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary *)o + intoWebRTCConstraints:optional_]; + } + } + } + + return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ + optionalConstraints:optional_]; +} + +#pragma mark - RTCPeerConnectionDelegate methods + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"signalingState", + @"state" : [self stringForSignalingState:newState]}); + } +} + +-(void)peerConnection:(RTCPeerConnection *)peerConnection + mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ + + peerConnection.remoteTracks[track.trackId] = track; + NSString *streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onAddTrack", + @"streamId": streamId, + @"trackId": track.trackId, + @"track": @{ + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"enabled": @(track.isEnabled), + @"remote": @(YES), + @"readyState": @"live"} + }); + } +} + +-(void)peerConnection:(RTCPeerConnection *)peerConnection + mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + NSString *streamId = stream.streamId; + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onRemoveTrack", + @"streamId": streamId, + @"trackId": track.trackId, + @"track": @{ + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"enabled": @(track.isEnabled), + @"remote": @(YES), + @"readyState": @"live"} + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack *track in stream.audioTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + for (RTCVideoTrack *track in stream.videoTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; + } + + NSString *streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onAddStream", + @"streamId": streamId, + @"audioTracks": audioTracks, + @"videoTracks": videoTracks, + }); + } +} + +- (void)peerConnection:(RTCPeerConnection 
*)peerConnection didRemoveStream:(RTCMediaStream *)stream { + NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; + // We assume there can be only one object for 1 key + if (keysArray.count > 1) { + NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); + } + NSString *streamId = stream.streamId; + + for (RTCVideoTrack *track in stream.videoTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack *track in stream.audioTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + [peerConnection.remoteStreams removeObjectForKey:streamId]; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onRemoveStream", + @"streamId": streamId, + }); + } +} + +- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{@"event" : @"onRenegotiationNeeded",}); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"iceConnectionState", + @"state" : [self stringForICEConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"iceGatheringState", + @"state" : [self stringForICEGatheringState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onCandidate", + @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { + if (-1 == dataChannel.channelId) { + return; + } + + NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; + dataChannel.peerConnectionId = peerConnection.flutterId; + dataChannel.delegate = self; + peerConnection.dataChannels[dataChannelId] = dataChannel; + + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] + binaryMessenger:self.messenger]; + + dataChannel.eventChannel = eventChannel; + dataChannel.flutterChannelId = dataChannelId; + [eventChannel setStreamHandler:dataChannel]; + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"didOpenDataChannel", + @"id": dataChannelId, + @"label": dataChannel.label + }); + } +} + +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didChangeConnectionState:(RTCPeerConnectionState)newState { + +} + +- (void)peerConnection:(RTCPeerConnection *)peerConnection +didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver { + +} + +/** Called when a receiver and its track are created. 
*/ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didAddReceiver:(RTCRtpReceiver *)rtpReceiver + streams:(NSArray *)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for(RTCMediaStream *stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event": @"onTrack", + @"track": [self mediaTrackToMap:rtpReceiver.track], + @"receiver": [self receiverToMap:rtpReceiver], + @"streams": streams, + }); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver { + +} + +/** Called when the selected ICE candidate pair is changed. */ +- (void)peerConnection:(RTCPeerConnection *)peerConnection + didChangeLocalCandidate:(RTCIceCandidate *)local + remoteCandidate:(RTCIceCandidate *)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString *)reason { + + FlutterEventSink eventSink = peerConnection.eventSink; + if(eventSink){ + eventSink(@{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate": local.sdp, + @"sdpMLineIndex": @(local.sdpMLineIndex), + @"sdpMid": local.sdpMid + }, + @"remote" : @{ + @"candidate": remote.sdp, + @"sdpMLineIndex": @(remote.sdpMLineIndex), + @"sdpMid": remote.sdpMid + }, + @"reason": reason, + @"lastDataReceivedMs": @(lastDataReceivedMs) + }); + } +} + +@end + diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.h b/common/darwin/Classes/FlutterRTCVideoRenderer.h new file mode 100755 index 0000000000..96dcd2203d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1,33 @@ +#import "FlutterWebRTCPlugin.h" + +#import +#import +#import +#import + +@interface FlutterRTCVideoRenderer : NSObject + +/** + * The {@link RTCVideoTrack}, if any, which this instance renders. 
+ */ +@property (nonatomic, strong) RTCVideoTrack *videoTrack; +@property (nonatomic) int64_t textureId; +@property (nonatomic, weak) id registry; +@property (nonatomic, strong) FlutterEventSink eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +- (void)dispose; + +@end + + +@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger; + +-(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; + +@end diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.m b/common/darwin/Classes/FlutterRTCVideoRenderer.m new file mode 100755 index 0000000000..234849ee2c --- /dev/null +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1,268 @@ +#import "FlutterRTCVideoRenderer.h" + +#import +#import +#import +#import + +#import +#include "libyuv.h" + +#import "FlutterWebRTCPlugin.h" + +@implementation FlutterRTCVideoRenderer { + CGSize _frameSize; + CGSize _renderSize; + CVPixelBufferRef _pixelBufferRef; + RTCVideoRotation _rotation; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; +} + +@synthesize textureId = _textureId; +@synthesize registry = _registry; +@synthesize eventSink = _eventSink; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger{ + self = [super init]; + if (self){ + _isFirstFrameRendered = false; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + _registry = registry; + _pixelBufferRef = nil; + _eventSink = nil; + _rotation = -1; + _textureId = [registry registerTexture:self]; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + return self; +} + +-(void)dealloc { + if(_pixelBufferRef){ + CVBufferRelease(_pixelBufferRef); + } +} + +- (CVPixelBufferRef)copyPixelBuffer { + if(_pixelBufferRef != nil){ + CVBufferRetain(_pixelBufferRef); + return _pixelBufferRef; + } + return nil; +} + +-(void)dispose{ + [_registry unregisterTexture:_textureId]; +} + +- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { + RTCVideoTrack *oldValue = self.videoTrack; + + if (oldValue != videoTrack) { + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:self]; + } + _videoTrack = videoTrack; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + if (videoTrack) { + [videoTrack addRenderer:self]; + } + } +} + + +-(id) correctRotation:(const id) src + withRotation:(RTCVideoRotation) rotation +{ + + int rotated_width = src.width; + int rotated_height = src.height; + + if (rotation == RTCVideoRotation_90 || + rotation == RTCVideoRotation_270) { + int temp = rotated_width; + rotated_width = rotated_height; + rotated_height = temp; + } + + id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; + + I420Rotate(src.dataY, src.strideY, + src.dataU, src.strideU, + src.dataV, src.strideV, + (uint8_t*)buffer.dataY, buffer.strideY, + (uint8_t*)buffer.dataU,buffer.strideU, + (uint8_t*)buffer.dataV, buffer.strideV, + src.width, src.height, + (RotationModeEnum)rotation); + + return buffer; +} + +-(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame +{ + id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; 
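The I420 copy produced here is written into the CVPixelBuffer that backs the Flutter texture registered in the initializer above, and renderFrame below reports size, rotation and first-frame changes on the per-texture event channel 'FlutterWebRTC/Texture<textureId>'. An illustrative Dart-side listener for those events (event names as emitted below; the helper function is hypothetical), with the video itself shown by passing the same textureId to Flutter's Texture widget:

    import 'package:flutter/services.dart';

    /// Illustrative listener for the per-texture events emitted by
    /// FlutterRTCVideoRenderer; channel name and event keys are as in this patch.
    void listenToRendererEvents(int textureId) {
      final events = EventChannel('FlutterWebRTC/Texture$textureId');
      events.receiveBroadcastStream().listen((dynamic event) {
        final map = event as Map<dynamic, dynamic>;
        switch (map['event']) {
          case 'didTextureChangeVideoSize':
            print('texture ${map['id']} is now ${map['width']}x${map['height']}');
            break;
          case 'didTextureChangeRotation':
            print('texture ${map['id']} rotated to ${map['rotation']}');
            break;
          case 'didFirstFrameRendered':
            print('first frame rendered');
            break;
        }
      });
    }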
+ CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + I420ToNV12(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dstY, + (int)dstYStride, + dstUV, + (int)dstUVStride, + i420Buffer.width, + i420Buffer.height); + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + I420ToARGB(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dst, + (int)bytesPerRow, + i420Buffer.width, + i420Buffer.height); + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + I420ToBGRA(i420Buffer.dataY, + i420Buffer.strideY, + i420Buffer.dataU, + i420Buffer.strideU, + i420Buffer.dataV, + i420Buffer.strideV, + dst, + (int)bytesPerRow, + i420Buffer.width, + i420Buffer.height); + } + } + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame *)frame { + + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + + __weak FlutterRTCVideoRenderer *weakSelf = self; + if(_renderSize.width != frame.width || _renderSize.height != frame.height){ + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + if(strongSelf.eventSink){ + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeVideoSize", + @"id": @(strongSelf.textureId), + @"width": @(frame.width), + @"height": @(frame.height), + }); + } + }); + _renderSize = CGSizeMake(frame.width, frame.height); + } + + if(frame.rotation != _rotation){ + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + if(strongSelf.eventSink){ + strongSelf.eventSink(@{ + @"event" : @"didTextureChangeRotation", + @"id": @(strongSelf.textureId), + @"rotation": @(frame.rotation), + }); + } + }); + + _rotation = frame.rotation; + } + + //Notify the Flutter new pixelBufferRef to be ready. + dispatch_async(dispatch_get_main_queue(), ^{ + FlutterRTCVideoRenderer *strongSelf = weakSelf; + [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; + if (!strongSelf->_isFirstFrameRendered) { + if (strongSelf.eventSink) { + strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); + strongSelf->_isFirstFrameRendered = true; + } + } + }); +} + +/** + * Sets the size of the video frame to render. + * + * @param size The size of the video frame to render. 
+ */ +- (void)setSize:(CGSize)size { + if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) + { + if(_pixelBufferRef){ + CVBufferRelease(_pixelBufferRef); + } + NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, + size.width, size.height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); + + _frameSize = size; + } +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) + +- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger{ + return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; +} + +-(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack { + renderer.videoTrack = videoTrack; +} +@end + diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h new file mode 100644 index 0000000000..93c81725a5 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1,34 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_MAC +#import +#endif + +#import +#import + +@class FlutterRTCVideoRenderer; + +@interface FlutterWebRTCPlugin : NSObject + +@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; +@property (nonatomic, strong) NSMutableDictionary *peerConnections; +@property (nonatomic, strong) NSMutableDictionary *localStreams; +@property (nonatomic, strong) NSMutableDictionary *localTracks; +@property (nonatomic, strong) NSMutableDictionary *renders; +#if TARGET_OS_IPHONE +@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ +#endif +@property (nonatomic, strong) NSObject* messenger; +@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; +@property (nonatomic) BOOL _usingFrontCamera; +@property (nonatomic) int _targetWidth; +@property (nonatomic) int _targetHeight; +@property (nonatomic) int _targetFps; + +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag; +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track; +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver; + +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m new file mode 100644 index 0000000000..2c6afad1f0 --- /dev/null +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1,1415 @@ +#import "FlutterWebRTCPlugin.h" +#import "FlutterRTCPeerConnection.h" +#import "FlutterRTCMediaStream.h" +#import "FlutterRTCDataChannel.h" +#import "FlutterRTCVideoRenderer.h" + +#import +#import + +@implementation FlutterWebRTCPlugin { + FlutterMethodChannel *_methodChannel; + id _registry; + id _messenger; + id _textures; + BOOL _speakerOn; +} + +@synthesize messenger = _messenger; + ++ (void)registerWithRegistrar:(NSObject*)registrar { + + FlutterMethodChannel* channel = [FlutterMethodChannel + methodChannelWithName:@"FlutterWebRTC.Method" + binaryMessenger:[registrar messenger]]; +#if 
TARGET_OS_IPHONE + UIViewController *viewController = (UIViewController *)registrar.messenger; +#endif + FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel + registrar:registrar + messenger:[registrar messenger] +#if TARGET_OS_IPHONE + viewController:viewController +#endif + withTextures:[registrar textures]]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithChannel:(FlutterMethodChannel *)channel + registrar:(NSObject*)registrar + messenger:(NSObject*)messenger +#if TARGET_OS_IPHONE + viewController:(UIViewController *)viewController +#endif + withTextures:(NSObject *)textures{ + + self = [super init]; + + if (self) { + _methodChannel = channel; + _registry = registrar; + _textures = textures; + _messenger = messenger; + _speakerOn = NO; +#if TARGET_OS_IPHONE + self.viewController = viewController; +#endif + } + //RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); + RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; + RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; + + _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] + initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory]; + + + self.peerConnections = [NSMutableDictionary new]; + self.localStreams = [NSMutableDictionary new]; + self.localTracks = [NSMutableDictionary new]; + self.renders = [[NSMutableDictionary alloc] init]; +#if TARGET_OS_IPHONE + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; +#endif + return self; +} + + +- (void)didSessionRouteChange:(NSNotification *)notification { + NSDictionary *interuptionDict = notification.userInfo; +#if TARGET_OS_IPHONE + NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; + + switch (routeChangeReason) { + case AVAudioSessionRouteChangeReasonCategoryChange: { + NSError* error; + [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? 
AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; + } + break; + + default: + break; + } +#endif +} + +- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { + + if ([@"createPeerConnection" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + NSDictionary* constraints = argsMap[@"constraints"]; + + RTCPeerConnection *peerConnection = [self.peerConnectionFactory + peerConnectionWithConfiguration:[self RTCConfiguration:configuration] + constraints:[self parseMediaConstraints:constraints] + delegate:self]; + + peerConnection.remoteStreams = [NSMutableDictionary new]; + peerConnection.remoteTracks = [NSMutableDictionary new]; + peerConnection.dataChannels = [NSMutableDictionary new]; + + NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; + peerConnection.flutterId = peerConnectionId; + + /*Create Event Channel.*/ + peerConnection.eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] + binaryMessenger:_messenger]; + [peerConnection.eventChannel setStreamHandler:peerConnection]; + + self.peerConnections[peerConnectionId] = peerConnection; + result(@{ @"peerConnectionId" : peerConnectionId}); + } else if ([@"getUserMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getUserMedia:constraints result:result]; + } else if ([@"getDisplayMedia" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getDisplayMedia:constraints result:result]; +#else + result(FlutterMethodNotImplemented); +#endif + } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { + [self createLocalMediaStream:result]; + } else if ([@"getSources" isEqualToString:call.method]) { + [self getSources:result]; + } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + [self mediaStreamGetTracks:streamId result:result]; + } else if ([@"createOffer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + { + [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createAnswer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary * constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + { + [self peerConnectionCreateAnswer:constraints + peerConnection:peerConnection + result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addStream" isEqualToString:call.method]) { + NSDictionary* argsMap 
= call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream *stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection && stream){ + [peerConnection addStream:stream]; + result(@""); + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"removeStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream *stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection && stream){ + [peerConnection removeStream:stream]; + result(nil); + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"captureFrame" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStreamTrack *track = [self trackForId: trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"setLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary *descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; + if(peerConnection) + { + [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary *descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; + + if(peerConnection) + { + [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString 
stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"sendDtmf" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* tone = argsMap[@"tone"]; + int duration = ((NSNumber*)argsMap[@"duration"]).intValue; + int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; + + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + + RTCRtpSender* audioSender = nil ; + for( RTCRtpSender *rtpSender in peerConnection.senders){ + if([[[rtpSender track] kind] isEqualToString:@"audio"]) { + audioSender = rtpSender; + } + } + if(audioSender){ + NSOperationQueue *queue = [[NSOperationQueue alloc] init]; + [queue addOperationWithBlock:^{ + double durationMs = duration / 1000.0; + double interToneGapMs = interToneGap / 1000.0; + [audioSender.dtmfSender insertDtmf :(NSString *)tone + duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; + NSLog(@"DTMF Tone played "); + }]; + } + + result(@{@"result": @"success"}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addCandidate" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* candMap = argsMap[@"candidate"]; + NSString *sdp = candMap[@"candidate"]; + int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; + NSString *sdpMid = candMap[@"sdpMid"]; + + RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + if(peerConnection) + { + [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; + }else{ + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getStats" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) + return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; + result(nil); + } else if ([@"createDataChannel" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* label = argsMap[@"label"]; + NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; + [self createDataChannel:peerConnectionId + label:label + config:[self RTCDataChannelConfiguration:dataChannelDict] + messenger:_messenger]; + result(nil); + } else if ([@"dataChannelSend" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + NSString* type = argsMap[@"type"]; + id data = argsMap[@"data"]; + + [self dataChannelSend:peerConnectionId + dataChannelId:dataChannelId + data:data + type:type]; + result(nil); + } else if ([@"dataChannelClose" 
isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + [self dataChannelClose:peerConnectionId + dataChannelId:dataChannelId]; + result(nil); + } else if ([@"streamDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + for (RTCVideoTrack *track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + RTCVideoSource *source = videoTrack.source; + if(source){ + [self.videoCapturer stopCapture]; + self.videoCapturer = nil; + } + } + for (RTCAudioTrack *track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:streamId]; + } + result(nil); + } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* enabled = argsMap[@"enabled"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil){ + track.isEnabled = enabled.boolValue; + } + result(nil); + } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil) { + if([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + [stream addAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]){ + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [stream addVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" message:nil details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" message:nil details:nil]); + } + result(nil); + } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStream *stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if(track != nil) { + if([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + [stream removeAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]){ + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [stream removeVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]); + } + result(nil); + } else if ([@"trackDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + [self.localTracks removeObjectForKey:trackId]; + result(nil); + } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = 
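+    // Note on 'streamDispose' above: the plugin owns a single videoCapturer, and it is
+    // stopped and released as soon as any disposed local stream carries a video track.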
argsMap[@"peerConnectionId"]; + + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [peerConnection close]; + [self.peerConnections removeObjectForKey:peerConnectionId]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary *dataChannels = peerConnection.dataChannels; + for (NSNumber *dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; + } + result(nil); + } else if ([@"createVideoRenderer" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures + messenger:_messenger]; + self.renders[@(render.textureId)] = render; + result(@{@"textureId": @(render.textureId)}); + } else if ([@"videoRendererDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSNumber *textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer *render = self.renders[textureId]; + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + result(nil); + } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSNumber *textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer *render = self.renders[textureId]; + NSString *streamId = argsMap[@"streamId"]; + NSString *ownerTag = argsMap[@"ownerTag"]; + if(!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" message:nil details:nil]); + return; + } + RTCMediaStream *stream = nil; + RTCVideoTrack* videoTrack = nil; + if([ownerTag isEqualToString:@"local"]){ + stream = _localStreams[streamId]; + } + if(!stream){ + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if(stream){ + NSArray *videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? 
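+      // Only the first video track of the matched stream is handed to the renderer.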
videoTracks[0] : nil; + if (!videoTrack) { + NSLog(@"Not found video track for RTCMediaStream: %@", streamId); + } + } + [self rendererSetSrcObject:render stream:videoTrack]; + result(nil); + } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackHasTorch:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL torch = [argsMap[@"torch"] boolValue]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; + [self mediaStreamTrackSwitchCamera:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); + } + } + } else if ([@"setVolume" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* volume = argsMap[@"volume"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + RTCAudioSource *audioSource = audioTrack.source; + audioSource.volume = [volume doubleValue]; + } + result(nil); + } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* mute = argsMap[@"mute"]; + RTCMediaStreamTrack *track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; + audioTrack.isEnabled = !mute.boolValue; + } + result(nil); + } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + NSDictionary* argsMap = call.arguments; + NSNumber* enable = argsMap[@"enable"]; + _speakerOn = enable.boolValue; + AVAudioSession *audioSession = [AVAudioSession sharedInstance]; + [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:_speakerOn ? 
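+    // DefaultToSpeaker routes PlayAndRecord output to the built-in speaker;
+    // passing 0 keeps the session's normal (receiver/earpiece) routing.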
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 + error:nil]; + [audioSession setActive:YES error:nil]; + result(nil); +#else + result(FlutterMethodNotImplemented); +#endif + } else if ([@"getLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + RTCSessionDescription* sdp = peerConnection.localDescription; + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + RTCSessionDescription* sdp = peerConnection.remoteDescription; + NSString *type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp": sdp.sdp, @"type": type}); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setConfiguration" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createSender" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* kind = argsMap[@"kind"]; + NSString* streamId = argsMap[@"streamId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection senderWithKind:kind streamId:streamId]; + result([self rtpSenderToMap:sender]); + } else if ([@"closeSender" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + 
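+    // Like the 'removeTrack' branch below, 'closeSender' ends by detaching the
+    // sender from the connection via removeTrack:.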
details:nil]); + return; + } + + if(![peerConnection removeTrack:sender]) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't close sender!"] + details:nil]); + return; + } + + result(nil); + } else if ([@"addTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [peerConnection removeTrack:sender]; + result(nil); + } else if ([@"addTransceiver" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* mediaType = argsMap[@"mediaType"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver* transceiver = nil; + + if(trackId != nil) { + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverWithTrack:track init:init]; + } else { + transceiver = [peerConnection addTransceiverWithTrack:track]; + } + } else if (mediaType != nil) { + RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; + if (transceiverInit != nil) { + RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; + transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; + } else { + transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; + } + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + 
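+    // 'addTransceiver' takes either a 'trackId' (addTransceiverWithTrack:) or a bare
+    // 'mediaType' (addTransceiverOfType:), honoring the optional 'transceiverInit' map
+    // on both paths; this error is returned when neither argument was supplied.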
message:[NSString stringWithFormat:@"Error: Incomplete parameters!"] + details:nil]); + return; + } + + if (transceiver == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] + details:nil]); + return; + } + + result([self transceiverToMap:transceiver]); + } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* direction = argsMap[@"direction"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } +#if TARGET_OS_IPHONE + [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; +#elif TARGET_OS_MAC + [transcevier setDirection:[self stringToTransceiverDirection:direction]]; +#endif + result(nil); + } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } + result(@{@"result": [self transceiverDirectionString:transcevier.direction]}); + } else if ([@"rtpTransceiverStop" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if(transcevier == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: transcevier not found!"] + details:nil]); + return; + } +#if TARGET_OS_IPHONE + [transcevier stopInternal]; +#elif TARGET_OS_MAC + [transcevier stop]; +#endif + result(nil); + } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = 
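+    // The TARGET_OS_IPHONE / TARGET_OS_MAC branches above exist presumably because the
+    // iOS and macOS builds of the WebRTC framework expose slightly different transceiver
+    // selectors (setDirection:error: / stopInternal vs. setDirection: / stop).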
argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSDictionary* parameters = argsMap[@"parameters"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setParameters:[self mapToRtpParameters:parameters]]; + + result(nil); + } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack *track = [self trackForId:trackId]; + if(track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderDispose" isEqualToString:call.method]){ + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + if(peerConnection == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: 
peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; + if(sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [peerConnection removeTrack:sender]; + result(nil); + } else { + result(FlutterMethodNotImplemented); + } +} + +- (void)dealloc +{ + [_localTracks removeAllObjects]; + _localTracks = nil; + [_localStreams removeAllObjects]; + _localStreams = nil; + + for (NSString *peerConnectionId in _peerConnections) { + RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; + peerConnection.delegate = nil; + [peerConnection close]; + } + [_peerConnections removeAllObjects]; + _peerConnectionFactory = nil; +} + + +-(void)mediaStreamGetTracks:(NSString*)streamId + result:(FlutterResult)result { + RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; + if(stream){ + NSMutableArray *audioTracks = [NSMutableArray array]; + NSMutableArray *videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack *track in stream.audioTracks) { + NSString *trackId = track.trackId; + [self.localTracks setObject:track forKey:trackId]; + [audioTracks addObject:@{ + @"enabled": @(track.isEnabled), + @"id": trackId, + @"kind": track.kind, + @"label": trackId, + @"readyState": @"live", + @"remote": @(NO) + }]; + } + + for (RTCMediaStreamTrack *track in stream.videoTracks) { + NSString *trackId = track.trackId; + [self.localTracks setObject:track forKey:trackId]; + [videoTracks addObject:@{ + @"enabled": @(track.isEnabled), + @"id": trackId, + @"kind": track.kind, + @"label": trackId, + @"readyState": @"live", + @"remote": @(NO) + }]; + } + + result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); + }else{ + result(nil); + } +} + +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId { + RTCMediaStream *stream = nil; + if (peerConnectionId.length > 0) { + RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; + stream = peerConnection.remoteStreams[streamId]; + } else { + for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { + stream = peerConnection.remoteStreams[streamId]; + if (stream) { + break; + } + } + } + if (!stream) { + stream = _localStreams[streamId]; + } + return stream; +} + +- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId { + RTCMediaStreamTrack *track = _localTracks[trackId]; + if (!track) { + for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { + track = peerConnection.remoteTracks[trackId]; + if (track) { + break; + } + } + } + return track; +} + + + +- (RTCIceServer *)RTCIceServer:(id)json +{ + if (!json) { + NSLog(@"a valid iceServer value"); + return nil; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return nil; + } + + NSArray *urls; + if ([json[@"url"] isKindOfClass:[NSString class]]) { + // TODO: 'url' is non-standard + urls = @[json[@"url"]]; + } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { + urls = @[json[@"urls"]]; + } else { + urls = (NSArray*)json[@"urls"]; + } + + if (json[@"username"] != nil || json[@"credential"] != nil) { + return [[RTCIceServer alloc]initWithURLStrings:urls + username:json[@"username"] + credential:json[@"credential"]]; + } + + return [[RTCIceServer alloc] initWithURLStrings:urls]; +} + + +- (nonnull 
RTCConfiguration *)RTCConfiguration:(id)json +{ + RTCConfiguration *config = [[RTCConfiguration alloc] init]; + + if (!json) { + return config; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return config; + } + + if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; + } + + if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { + NSString *bundlePolicy = json[@"bundlePolicy"]; + if ([bundlePolicy isEqualToString:@"balanced"]) { + config.bundlePolicy = RTCBundlePolicyBalanced; + } else if ([bundlePolicy isEqualToString:@"max-compat"]) { + config.bundlePolicy = RTCBundlePolicyMaxCompat; + } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { + config.bundlePolicy = RTCBundlePolicyMaxBundle; + } + } + + if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; + } + + if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; + } + + if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { + NSMutableArray *iceServers = [NSMutableArray new]; + for (id server in json[@"iceServers"]) { + RTCIceServer *convert = [self RTCIceServer:server]; + if (convert != nil) { + [iceServers addObject:convert]; + } + } + config.iceServers = iceServers; + } + + if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { + NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; + if ([iceTransportPolicy isEqualToString:@"all"]) { + config.iceTransportPolicy = RTCIceTransportPolicyAll; + } else if ([iceTransportPolicy isEqualToString:@"none"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNone; + } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNoHost; + } else if ([iceTransportPolicy isEqualToString:@"relay"]) { + config.iceTransportPolicy = RTCIceTransportPolicyRelay; + } + } + + if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { + NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; + if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; + } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; + } + } + + if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { + NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; + if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; + } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; + } + } + + if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { + NSString *sdpSemantics = json[@"sdpSemantics"]; + if ([sdpSemantics isEqualToString:@"plan-b"]) { + config.sdpSemantics = RTCSdpSemanticsPlanB; + } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { + config.sdpSemantics = 
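+      // The transceiver-based calls above ('addTransceiver', 'rtpTransceiver*') require the
+      // connection to be created with Unified Plan semantics, so Dart callers are expected
+      // to put 'sdpSemantics': 'unified-plan' in the configuration map.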
RTCSdpSemanticsUnifiedPlan; + } + } + + return config; +} + +- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json +{ + if (!json) { + return nil; + } + if ([json isKindOfClass:[NSDictionary class]]) { + RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; + + if (json[@"id"]) { + [init setChannelId:(int)[json[@"id"] integerValue]]; + } + if (json[@"ordered"]) { + init.isOrdered = [json[@"ordered"] boolValue]; + } + if (json[@"maxRetransmitTime"]) { + init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; + } + if (json[@"maxRetransmits"]) { + init.maxRetransmits = [json[@"maxRetransmits"] intValue]; + } + if (json[@"negotiated"]) { + init.isNegotiated = [json[@"negotiated"] boolValue]; + } + if (json[@"protocol"]) { + init.protocol = json[@"protocol"]; + } + return init; + } + return nil; +} + +- (CGRect)parseRect:(NSDictionary *)rect { + return CGRectMake([[rect valueForKey:@"left"] doubleValue], + [[rect valueForKey:@"top"] doubleValue], + [[rect valueForKey:@"width"] doubleValue], + [[rect valueForKey:@"height"] doubleValue]); +} + +- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { + return @{ + @"dtmfSenderId": Id, + @"interToneGap": @(dtmf.interToneGap / 1000.0), + @"duration": @(dtmf.duration / 1000.0), + }; +} + +- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { + NSDictionary *rtcp = @{ + @"cname": parameters.rtcp.cname, + @"reducedSize": @(parameters.rtcp.isReducedSize), + }; + + NSMutableArray *headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri": headerExtension.uri, + @"encrypted": @(headerExtension.encrypted), + @"id": @(headerExtension.id), + }]; + } + + NSMutableArray *encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + [encodings addObject:@{ + @"active": @(encoding.isActive), + @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : [NSNumber numberWithInt:0], + @"maxBitrateBps": encoding.maxBitrateBps? encoding.maxBitrateBps : [NSNumber numberWithInt:0], + @"maxFramerate": encoding.maxFramerate? encoding.maxFramerate : @(30), + @"numTemporalLayers": encoding.numTemporalLayers? encoding.numTemporalLayers : @(1), + @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? @(encoding.scaleResolutionDownBy.doubleValue) : [NSNumber numberWithDouble:1.0], + @"ssrc": encoding.ssrc ? encoding.ssrc : [NSNumber numberWithLong:0] + }]; + } + + NSMutableArray *codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name": codec.name, + @"payloadType": @(codec.payloadType), + @"clockRate": codec.clockRate, + @"numChannels": codec.numChannels? 
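+      // Nil values are not allowed in dictionary literals, hence the fallback defaults
+      // for the optional encoding/codec fields here and above.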
codec.numChannels : @(1), + @"parameters": codec.parameters, + @"kind": codec.kind + }]; + } + + return @{ + @"transactionId": parameters.transactionId, + @"rtcp": rtcp, + @"headerExtensions": headerExtensions, + @"encodings": encodings, + @"codecs": codecs + }; +} + +-(NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId": stream.streamId, + @"ownerTag": ownerTag, + @"audioTracks": audioTracks, + @"videoTracks":videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if(track == nil) + return @{}; + NSDictionary *params = @{ + @"enabled": @(track.isEnabled), + @"id": track.trackId, + @"kind": track.kind, + @"label": track.trackId, + @"readyState": [self streamTrackStateToString:track.readyState], + @"remote": @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender *)sender { + NSDictionary *params = @{ + @"senderId": sender.senderId, + @"ownsTrack": @(YES), + @"rtpParameters": [self rtpParametersToMap:sender.parameters], + @"track": [self mediaTrackToMap:sender.track], + @"dtmfSender": [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +-(NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary *params = @{ + @"receiverId": receiver.receiverId, + @"rtpParameters": [self rtpParametersToMap:receiver.parameters], + @"track": [self mediaTrackToMap:receiver.track], + }; + return params; +} + +-(RTCRtpTransceiver*) getRtpTransceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if([transceiver.mid isEqualToString:Id]){ + return transceiver; + } + } + return nil; +} + +-(RTCRtpSender*) getRtpSnderById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpSender* sender in peerConnection.senders) { + if([sender.senderId isEqualToString:Id]){ + return sender; + } + } + return nil; +} + +-(RTCRtpReceiver*) getRtpReceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { + for( RTCRtpReceiver* receiver in peerConnection.receivers) { + if([receiver.receiverId isEqualToString:Id]){ + return receiver; + } + } + return nil; +} + +-(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; +#if TARGET_OS_IPHONE + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; +#endif + [encoding setRid:map[@"rid"]]; + + if(map[@"active"] != nil) { + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; + } + + if(map[@"minBitrateBps"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrateBps"]]; + } + + if(map[@"maxBitrateBps"] != nil) { + [encoding 
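+  // Note on the lookups above: transceivers are matched by their mid, which is only
+  // assigned once an SDP exchange has produced the corresponding m-line;
+  // -transceiverToMap: below therefore falls back to an empty id for a freshly
+  // added transceiver.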
setMaxBitrateBps:(NSNumber*)map[@"maxBitrateBps"]]; + } + + if(map[@"maxFramerate"] != nil) { + [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if(map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if(map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + return encoding; +} + +-(RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; + NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [RTCRtpTransceiverInit alloc]; + init.direction = [self stringToTransceiverDirection:direction]; + init.streamIds = streamIds; + + if(encodingsParams != nil) { + NSArray *sendEncodings = [[NSArray alloc] init]; + for (NSDictionary* map in encodingsParams){ + sendEncodings = [sendEncodings arrayByAddingObject:[self mapToEncoding:map]]; + } + [init setSendEncodings:sendEncodings]; + } + return init; +} + +-(RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +-(RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if([type isEqualToString:@"sendonly"]){ + return RTCRtpTransceiverDirectionSendOnly; + } else if([type isEqualToString: @"recvonly"]){ + return RTCRtpTransceiverDirectionRecvOnly; + } else if([type isEqualToString: @"inactive"]){ + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +-(RTCRtpParameters *)mapToRtpParameters:(NSDictionary *)map { + //TODO: + return nil; +} + +-(NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + } + return nil; +} + +-(NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid? 
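+  // Re: -mapToRtpParameters: above — it is still a stub that returns nil, so the
+  // 'rtpSenderSetParameters' branch currently hands nil to -[RTCRtpSender setParameters:].
+  // A minimal sketch (an assumption, not part of this patch) would mutate the sender's
+  // existing parameters instead of building an RTCRtpParameters from scratch, reusing the
+  // key names produced by -rtpParametersToMap: above, e.g.:
+  //
+  //   RTCRtpParameters *parameters = sender.parameters;
+  //   NSArray *encodings = map[@"encodings"];
+  //   for (NSUInteger i = 0; i < parameters.encodings.count && i < encodings.count; i++) {
+  //     NSDictionary *enc = encodings[i];
+  //     if (enc[@"active"] != nil) parameters.encodings[i].isActive = [enc[@"active"] boolValue];
+  //     if (enc[@"maxBitrateBps"] != nil) parameters.encodings[i].maxBitrateBps = enc[@"maxBitrateBps"];
+  //     if (enc[@"maxFramerate"] != nil) parameters.encodings[i].maxFramerate = enc[@"maxFramerate"];
+  //   }
+  //   [sender setParameters:parameters];
+  //
+  // That shape needs access to the sender itself, so it is left here only as a comment.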
transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId": mid, + @"mid": mid, + @"direction": [self transceiverDirectionString:transceiver.direction], + @"sender": [self rtpSenderToMap:transceiver.sender], + @"receiver": [self receiverToMap:transceiver.receiver] + }; + return params; +} + +@end diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h deleted file mode 100644 index f951a3d4ba..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.h +++ /dev/null @@ -1,9 +0,0 @@ -#import - -@interface FlutterRPScreenRecorder : RTCVideoCapturer - --(void)startCapture; - --(void)stopCapture; - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.h b/ios/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m deleted file mode 100644 index aa61614915..0000000000 --- a/ios/Classes/FlutterRPScreenRecorder.m +++ /dev/null @@ -1,72 +0,0 @@ -#import "FlutterRPScreenRecorder.h" -#import - -//See: https://developer.apple.com/videos/play/wwdc2017/606/ - -@implementation FlutterRPScreenRecorder { - RPScreenRecorder *screenRecorder; - RTCVideoSource *source; -} - -- (instancetype)initWithDelegate:(__weak id)delegate { - source = delegate; - return [super initWithDelegate:delegate]; -} - --(void)startCapture -{ - if(screenRecorder == NULL) - screenRecorder = [RPScreenRecorder sharedRecorder]; - - [screenRecorder setMicrophoneEnabled:NO]; - - if (![screenRecorder isAvailable]) { - NSLog(@"Screen recorder is not available!"); - return; - } - - [screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) { - if (bufferType == RPSampleBufferTypeVideo) {// We want video only now - [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; - } - } completionHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)stopCapture -{ - [screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) { - if (error != nil) - NSLog(@"!!! 
stopCaptureWithHandler/completionHandler %@ !!!", error); - }]; -} - --(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType -{ - if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || - !CMSampleBufferDataIsReady(sampleBuffer)) { - return; - } - - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - if (pixelBuffer == nil) { - return; - } - - size_t width = CVPixelBufferGetWidth(pixelBuffer); - size_t height = CVPixelBufferGetHeight(pixelBuffer); - - [source adaptOutputFormatToWidth:width/2 height:height/2 fps:8]; - - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; - int64_t timeStampNs = - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:RTCVideoRotation_0 - timeStampNs:timeStampNs]; - [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; -} - -@end diff --git a/ios/Classes/FlutterRPScreenRecorder.m b/ios/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/ios/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.h b/ios/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index e408c53076..0000000000 --- a/ios/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), 
eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? - ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. 
-- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? data : [NSNull null])}); - } -} - -@end diff --git a/ios/Classes/FlutterRTCDataChannel.m b/ios/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/ios/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 03e7655595..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.h b/ios/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 34ae78f1a8..0000000000 --- a/ios/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,82 +0,0 @@ -#import - -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import CoreVideo; - -@implementation FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; - - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - break; - case RTCVideoRotation_180: - orientation = 
UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } - - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/ios/Classes/FlutterRTCFrameCapturer.m b/ios/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/ios/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 12f1633cde..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,29 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)createLocalMediaStream:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track - torch:(BOOL) torch - result:(FlutterResult) result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/ios/Classes/FlutterRTCMediaStream.h b/ios/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 322766b39a..0000000000 --- a/ios/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,613 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRPScreenRecorder.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints 
*)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). 
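// getUserMedia receives the Dart-side constraints as a plain NSDictionary.
// A minimal sketch of the shape this implementation understands: each kind is
// either a boolean or a dictionary, and mandatory dimension values arrive as
// strings (they are parsed with intValue further down). The helper is
// illustrative only and not part of the plugin:
static NSDictionary *ExampleGetUserMediaConstraints(void) {
  return @{
    @"audio" : @YES,
    @"video" : @{
      @"mandatory" : @{@"minWidth" : @"1280",
                       @"minHeight" : @"720",
                       @"minFrameRate" : @"30"},
      @"facingMode" : @"user"
    }
  };
}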
- NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. 
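// Every track reported to Dart (in the result map above and in the peer
// connection events later in this patch) is flattened into the same map shape.
// A sketch of that shape; the helper is illustrative and not part of the plugin:
static NSDictionary *ExampleTrackToMap(RTCMediaStreamTrack *track, BOOL remote) {
  return @{@"id" : track.trackId,
           @"kind" : track.kind,
           @"label" : track.trackId,
           @"enabled" : @(track.isEnabled),
           @"remote" : @(remote),
           @"readyState" : @"live"};
}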
- if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. - if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. - */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. 
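// The failure path below and the grant path further down both bounce their
// callbacks through the main queue, so the Dart side always observes the
// outcome asynchronously. A minimal standalone sketch of the same
// system-API pattern (the log strings are illustrative, not plugin events):
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSLog(@"%@", granted ? @"capture permission granted"
                         : @"NotAllowedError: capture permission denied");
  });
}];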
- dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." - errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)createLocalMediaStream:(FlutterResult)result{ - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": [mediaStream streamId] }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to 
mediaStreamTrackStop? only call mediaStream explicitly? - if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack *)track result:(FlutterResult) result -{ - if (!self.videoCapturer) { - result(@NO); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - result(@NO); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); -} - --(void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack *)track torch:(BOOL)torch result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't set torch"); - return; - } - if (self.videoCapturer.captureSession.inputs.count == 0) { - NSLog(@"Video capturer is missing an input. Can't set torch"); - return; - } - - AVCaptureDeviceInput *deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; - AVCaptureDevice *device = deviceInput.device; - - if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { - NSLog(@"Current capture device does not support torch. Can't set torch"); - return; - } - - NSError *error; - if ([device lockForConfiguration:&error] == NO) { - NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); - return; - } - - device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; - [device unlockForConfiguration]; - - result(nil); -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. 
Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/ios/Classes/FlutterRTCMediaStream.m b/ios/Classes/FlutterRTCMediaStream.m new file mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - 
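// The RTCPeerConnection (Flutter) category above declares stored-looking
// properties, but categories cannot add instance variables, so each property
// is backed by an associated object keyed on its own getter selector (the .m
// below shows the real implementations). A minimal self-contained example of
// the same pattern; the category and property name are illustrative only:
#import <objc/runtime.h>

@interface NSObject (ExampleFlutterTag)
@property (nonatomic, strong) NSString *exampleFlutterTag;
@end

@implementation NSObject (ExampleFlutterTag)
- (NSString *)exampleFlutterTag {
  return objc_getAssociatedObject(self, _cmd);
}
- (void)setExampleFlutterTag:(NSString *)tag {
  objc_setAssociatedObject(self, @selector(exampleFlutterTag), tag,
                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
@end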
--(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/ios/Classes/FlutterRTCPeerConnection.h b/ios/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index a265d8e8a5..0000000000 --- a/ios/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,558 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result 
-{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. 
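// createOffer/createAnswer above hand their result back to Dart as a two-key
// map built inside the completion handlers. A sketch of that conversion; the
// helper is illustrative and not part of the plugin:
static NSDictionary *ExampleDescriptionToMap(RTCSessionDescription *sdp) {
  return @{@"sdp" : sdp.sdp,
           @"type" : [RTCSessionDescription stringForType:sdp.type]};
}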
- } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; - case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. - */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. 
- */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection 
*)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = peerConnection.flutterId; - dataChannel.delegate = self; - peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -/** Called any time the PeerConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection -didChangeConnectionState:(RTCPeerConnectionState)newState { - -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection -didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver { - -} - -/** Called when a receiver and its track are created. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddReceiver:(RTCRtpReceiver *)rtpReceiver - streams:(NSArray *)mediaStreams { - // For unified-plan - NSMutableArray* streams = [NSMutableArray array]; - for(RTCMediaStream *stream in mediaStreams) { - [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; - } - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event": @"onTrack", - @"track": [self mediaTrackToMap:rtpReceiver.track], - @"receiver": [self receiverToMap:rtpReceiver], - @"streams": streams, - }); - } -} - -/** Called when the receiver and its track are removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver { - -} - -/** Called when the selected ICE candidate pair is changed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeLocalCandidate:(RTCIceCandidate *)local - remoteCandidate:(RTCIceCandidate *)remote - lastReceivedMs:(int)lastDataReceivedMs - changeReason:(NSString *)reason { - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onSelectedCandidatePairChanged", - @"local" : @{ - @"candidate": local.sdp, - @"sdpMLineIndex": @(local.sdpMLineIndex), - @"sdpMid": local.sdpMid - }, - @"remote" : @{ - @"candidate": remote.sdp, - @"sdpMLineIndex": @(remote.sdpMLineIndex), - @"sdpMid": remote.sdpMid - }, - @"reason": reason, - @"lastDataReceivedMs": @(lastDataReceivedMs) - }); - } -} - -@end - diff --git a/ios/Classes/FlutterRTCPeerConnection.m b/ios/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/ios/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index 96dcd2203d..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,33 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link RTCVideoTrack}, if any, which this instance renders. 
- */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack; - -@end diff --git a/ios/Classes/FlutterRTCVideoRenderer.h b/ios/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 234849ee2c..0000000000 --- a/ios/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,268 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" - -#import -#import -#import -#import - -#import -#include "libyuv.h" - -#import "FlutterWebRTCPlugin.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - _textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - (uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - 
src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. 
- */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack { - renderer.videoTrack = videoTrack; -} -@end - diff --git a/ios/Classes/FlutterRTCVideoRenderer.m b/ios/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/ios/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index acd1f44534..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,33 +0,0 @@ -#import -#import - -#import -#import -#import -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; -- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag; -- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track; -- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver; - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index 1dfd0af07b..0000000000 --- a/ios/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,1387 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import 
"FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import -#import - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; - BOOL _speakerOn; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - UIViewController *viewController = (UIViewController *)registrar.messenger; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - viewController:viewController - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - viewController:(UIViewController *)viewController - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - _speakerOn = NO; - self.viewController = viewController; - } - //RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = [NSMutableDictionary new]; - self.localStreams = [NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; - - return self; -} - - -- (void)didSessionRouteChange:(NSNotification *)notification { - NSDictionary *interuptionDict = notification.userInfo; - NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; - - switch (routeChangeReason) { - case AVAudioSessionRouteChangeReasonCategoryChange: { - NSError* error; - [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? 
AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; - } - break; - - default: - break; - } -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { - [self createLocalMediaStream:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - 
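// --- Illustrative sketch (not part of this patch) ---------------------------
// Every branch of -handleMethodCall:result: above repeats the same pattern:
// read "peerConnectionId" from call.arguments, look the connection up in
// self.peerConnections, and report a FlutterError when it is missing. A
// hypothetical helper capturing that pattern could look like the following;
// the patch itself keeps the lookup inline in each branch.
- (RTCPeerConnection *)peerConnectionForCall:(FlutterMethodCall *)call
                                      result:(FlutterResult)result {
    NSDictionary *argsMap = call.arguments;
    NSString *peerConnectionId = argsMap[@"peerConnectionId"];
    RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
    if (peerConnection == nil) {
        // Mirrors the error shape used throughout the handler above.
        result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
                                   message:@"Error: peerConnection not found!"
                                   details:nil]);
    }
    return peerConnection;
}
// ---------------------------------------------------------------------------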
RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not 
found!"] - details:nil]); - } - } else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = 
argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream addAudioTrack:audioTrack]; - } else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream addVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil) { - if([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - [stream removeAudioTrack:audioTrack]; - } else if ([track isKindOfClass:[RTCVideoTrack class]]){ - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [stream removeVideoTrack:videoTrack]; - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]); - } - } else { - result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]); - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) 
{ - [peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *ownerTag = argsMap[@"ownerTag"]; - if(!render) { - result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" message:nil details:nil]); - return; - } - RTCMediaStream *stream = nil; - RTCVideoTrack* videoTrack = nil; - if([ownerTag isEqualToString:@"local"]){ - stream = _localStreams[streamId]; - } - if(!stream){ - stream = [self streamForId:streamId peerConnectionId:ownerTag]; - } - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"Not found video track for RTCMediaStream: %@", streamId); - } - } - [self rendererSetSrcObject:render stream:videoTrack]; - result(nil); - } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackHasTorch:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - BOOL torch = [argsMap[@"torch"] boolValue]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; - _speakerOn = enable.boolValue; - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:_speakerOn ? 
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createSender" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* kind = argsMap[@"kind"]; - NSString* streamId = argsMap[@"streamId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender* sender = [peerConnection senderWithKind:kind streamId:streamId]; - result([self rtpSenderToMap:sender]); - } else if ([@"closeSender" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - - if(![peerConnection 
removeTrack:sender]) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: can't close sender!"] - details:nil]); - return; - } - - result(nil); - } else if ([@"addTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - NSArray* streamIds = argsMap[@"streamIds"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - - RTCMediaStreamTrack *track = [self trackForId:trackId]; - if(track == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: track not found!"] - details:nil]); - return; - } - RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; - result([self rtpSenderToMap:sender]); - } else if ([@"removeTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - [peerConnection removeTrack:sender]; - result(nil); - } else if ([@"addTransceiver" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* transceiverInit = argsMap[@"transceiverInit"]; - NSString* trackId = argsMap[@"trackId"]; - NSString* mediaType = argsMap[@"mediaType"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpTransceiver* transceiver = nil; - - if(trackId != nil) { - RTCMediaStreamTrack *track = [self trackForId:trackId]; - if (transceiverInit != nil) { - RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; - transceiver = [peerConnection addTransceiverWithTrack:track init:init]; - } else { - transceiver = [peerConnection addTransceiverWithTrack:track]; - } - } else if (mediaType != nil) { - RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType]; - if (transceiverInit != nil) { - RTCRtpTransceiverInit *init = [self mapToTransceiverInit:transceiverInit]; - transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init]; - } else { - transceiver = [peerConnection addTransceiverOfType:rtpMediaType]; - } - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: Incomplete 
parameters!"] - details:nil]); - return; - } - - if (transceiver == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: can't addTransceiver!"] - details:nil]); - return; - } - - result([self transceiverToMap:transceiver]); - } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* direction = argsMap[@"direction"]; - NSString* transceiverId = argsMap[@"transceiverId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; - if(transcevier == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: transcevier not found!"] - details:nil]); - return; - } - [transcevier setDirection:[self stringToTransceiverDirection:direction] error:nil]; - result(nil); - } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* transceiverId = argsMap[@"transceiverId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; - if(transcevier == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: transcevier not found!"] - details:nil]); - return; - } - result(@{@"result": [self transceiverDirectionString:transcevier.direction]}); - } else if ([@"rtpTransceiverStop" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* transceiverId = argsMap[@"transceiverId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpTransceiver *transcevier = [self getRtpTransceiverById:peerConnection Id:transceiverId]; - if(transcevier == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: transcevier not found!"] - details:nil]); - return; - } - [transcevier stopInternal]; - result(nil); - } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - NSDictionary* parameters = argsMap[@"parameters"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError 
errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - [sender setParameters:[self mapToRtpParameters:parameters]]; - - result(nil); - } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - RTCMediaStreamTrack *track = [self trackForId:trackId]; - if(track == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: track not found!"] - details:nil]); - return; - } - [sender setTrack:track]; - result(nil); - } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - RTCMediaStreamTrack *track = [self trackForId:trackId]; - if(track == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: track not found!"] - details:nil]); - return; - } - [sender setTrack:track]; - result(nil); - } else if ([@"rtpSenderDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* senderId = argsMap[@"senderId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - return; - } - RTCRtpSender *sender = [self getRtpSnderById:peerConnection Id:senderId]; - if(sender == nil) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - 
message:[NSString stringWithFormat:@"Error: sender not found!"] - details:nil]); - return; - } - [peerConnection removeTrack:sender]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ - result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId { - RTCMediaStream *stream = nil; - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - if (!stream) { - stream = _localStreams[streamId]; - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId { - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - return track; -} - - - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - 
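// --- Illustrative sketch (not part of this patch) ---------------------------
// Example of the configuration map this parser accepts. The iceServers
// entries are handled by -RTCIceServer: above; the remaining keys are parsed
// just below. All URLs, credentials and policy values here are placeholders.
NSDictionary *exampleConfiguration = @{
    @"iceServers": @[
        @{@"urls": @"stun:stun.example.org:19302"},
        @{@"urls": @[@"turn:turn.example.org:3478"],
          @"username": @"user",
          @"credential": @"secret"}
    ],
    @"sdpSemantics": @"unified-plan",  // selects the Unified Plan semantics targeted by this patch
    @"bundlePolicy": @"max-bundle",
    @"iceTransportPolicy": @"all"
};
RTCConfiguration *exampleConfig = [self RTCConfiguration:exampleConfiguration];
// ---------------------------------------------------------------------------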
if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if ([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = 
[RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if (json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { - return @{ - @"dtmfSenderId": Id, - @"interToneGap": @(dtmf.interToneGap / 1000.0), - @"duration": @(dtmf.duration / 1000.0), - }; -} - -- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { - NSDictionary *rtcp = @{ - @"cname": parameters.rtcp.cname, - @"reducedSize": @(parameters.rtcp.isReducedSize), - }; - - NSMutableArray *headerExtensions = [NSMutableArray array]; - for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { - [headerExtensions addObject:@{ - @"uri": headerExtension.uri, - @"encrypted": @(headerExtension.encrypted), - @"id": @(headerExtension.id), - }]; - } - - NSMutableArray *encodings = [NSMutableArray array]; - for (RTCRtpEncodingParameters* encoding in parameters.encodings) { - [encodings addObject:@{ - @"active": @(encoding.isActive), - @"minBitrateBps": encoding.minBitrateBps? encoding.minBitrateBps : [NSNumber numberWithInt:0], - @"maxBitrateBps": encoding.maxBitrateBps? encoding.maxBitrateBps : [NSNumber numberWithInt:0], - @"maxFramerate": encoding.maxFramerate? encoding.maxFramerate : @(30), - @"numTemporalLayers": encoding.numTemporalLayers? encoding.numTemporalLayers : @(1), - @"scaleResolutionDownBy": encoding.scaleResolutionDownBy? @(encoding.scaleResolutionDownBy.doubleValue) : [NSNumber numberWithDouble:1.0], - @"ssrc": encoding.ssrc ? encoding.ssrc : [NSNumber numberWithLong:0] - }]; - } - - NSMutableArray *codecs = [NSMutableArray array]; - for (RTCRtpCodecParameters* codec in parameters.codecs) { - [codecs addObject:@{ - @"name": codec.name, - @"payloadType": @(codec.payloadType), - @"clockRate": codec.clockRate, - @"numChannels": codec.numChannels? 
codec.numChannels : @(1), - @"parameters": codec.parameters, - @"kind": codec.kind - }]; - } - - return @{ - @"transactionId": parameters.transactionId, - @"rtcp": rtcp, - @"headerExtensions": headerExtensions, - @"encodings": encodings, - @"codecs": codecs - }; -} - --(NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { - switch (state) { - case RTCMediaStreamTrackStateLive: - return @"live"; - case RTCMediaStreamTrackStateEnded: - return @"ended"; - default: - break; - } - return @""; -} - -- (NSDictionary*)mediaStreamToMap:(RTCMediaStream *)stream ownerTag:(NSString*)ownerTag { - NSMutableArray* audioTracks = [NSMutableArray array]; - NSMutableArray* videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack* track in stream.audioTracks) { - [audioTracks addObject:[self mediaTrackToMap:track]]; - } - - for (RTCMediaStreamTrack* track in stream.videoTracks) { - [audioTracks addObject:[self mediaTrackToMap:track]]; - } - - return @{ - @"streamId": stream.streamId, - @"ownerTag": ownerTag, - @"audioTracks": audioTracks, - @"videoTracks":videoTracks, - - }; -} - -- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { - if(track == nil) - return @{}; - NSDictionary *params = @{ - @"enabled": @(track.isEnabled), - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"readyState": [self streamTrackStateToString:track.readyState], - @"remote": @(YES) - }; - return params; -} - -- (NSDictionary*)rtpSenderToMap:(RTCRtpSender *)sender { - NSDictionary *params = @{ - @"senderId": sender.senderId, - @"ownsTrack": @(YES), - @"rtpParameters": [self rtpParametersToMap:sender.parameters], - @"track": [self mediaTrackToMap:sender.track], - @"dtmfSender": [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] - }; - return params; -} - --(NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { - NSDictionary *params = @{ - @"receiverId": receiver.receiverId, - @"rtpParameters": [self rtpParametersToMap:receiver.parameters], - @"track": [self mediaTrackToMap:receiver.track], - }; - return params; -} - --(RTCRtpTransceiver*) getRtpTransceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { - for( RTCRtpTransceiver* transceiver in peerConnection.transceivers) { - if([transceiver.mid isEqualToString:Id]){ - return transceiver; - } - } - return nil; -} - --(RTCRtpSender*) getRtpSnderById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { - for( RTCRtpSender* sender in peerConnection.senders) { - if([sender.senderId isEqualToString:Id]){ - return sender; - } - } - return nil; -} - --(RTCRtpReceiver*) getRtpReceiverById:(RTCPeerConnection *)peerConnection Id:(NSString*)Id { - for( RTCRtpReceiver* receiver in peerConnection.receivers) { - if([receiver.receiverId isEqualToString:Id]){ - return receiver; - } - } - return nil; -} - --(RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { - RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init]; - encoding.isActive = YES; - encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; - encoding.numTemporalLayers = [NSNumber numberWithInt:1]; - encoding.networkPriority = RTCPriorityLow; - encoding.bitratePriority = 1.0; - [encoding setRid:map[@"rid"]]; - - if(map[@"active"] != nil) { - [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; - } - - if(map[@"minBitrateBps"] != nil) { - [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrateBps"]]; - } - - if(map[@"maxBitrateBps"] != nil) { - [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrateBps"]]; - } 
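// --- Illustrative sketch (not part of this patch) ---------------------------
// Example of the "transceiverInit" map consumed by -mapToTransceiverInit:
// (defined just below), whose per-layer entries are fed through this encoding
// mapper. The rid names, stream id and bitrates are illustrative simulcast
// settings, not defaults taken from the patch.
NSDictionary *exampleInit = @{
    @"direction": @"sendonly",
    @"streamIds": @[@"stream-1"],
    @"sendEncodings": @[
        @{@"rid": @"f", @"active": @YES, @"maxBitrateBps": @1200000},
        @{@"rid": @"h", @"active": @YES, @"maxBitrateBps": @600000, @"scaleResolutionDownBy": @2.0},
        @{@"rid": @"q", @"active": @YES, @"maxBitrateBps": @200000, @"scaleResolutionDownBy": @4.0}
    ]
};
RTCRtpTransceiverInit *exampleTransceiverInit = [self mapToTransceiverInit:exampleInit];
// ---------------------------------------------------------------------------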
- - if(map[@"maxFramerate"] != nil) { - [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; - } - - if(map[@"numTemporalLayers"] != nil) { - [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; - } - - if(map[@"scaleResolutionDownBy"] != nil) { - [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; - } - return encoding; -} - --(RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { - NSArray* streamIds = map[@"streamIds"]; - NSArray* encodingsParams = map[@"sendEncodings"]; - NSString* direction = map[@"direction"]; - - RTCRtpTransceiverInit* init = [RTCRtpTransceiverInit alloc]; - init.direction = [self stringToTransceiverDirection:direction]; - init.streamIds = streamIds; - - if(encodingsParams != nil) { - NSArray *sendEncodings = [[NSArray alloc] init]; - for (NSDictionary* map in encodingsParams){ - sendEncodings = [sendEncodings arrayByAddingObject:[self mapToEncoding:map]]; - } - [init setSendEncodings:sendEncodings]; - } - return init; -} - --(RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { - if([type isEqualToString:@"audio"]) { - return RTCRtpMediaTypeAudio; - } else if([type isEqualToString:@"video"]) { - return RTCRtpMediaTypeVideo; - } else if([type isEqualToString:@"data"]) { - return RTCRtpMediaTypeData; - } - return RTCRtpMediaTypeAudio; -} - --(RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { - if([type isEqualToString:@"sendrecv"]) { - return RTCRtpTransceiverDirectionSendRecv; - } else if([type isEqualToString:@"sendonly"]){ - return RTCRtpTransceiverDirectionSendOnly; - } else if([type isEqualToString: @"recvonly"]){ - return RTCRtpTransceiverDirectionRecvOnly; - } else if([type isEqualToString: @"inactive"]){ - return RTCRtpTransceiverDirectionInactive; - } - return RTCRtpTransceiverDirectionInactive; -} - --(RTCRtpParameters *)mapToRtpParameters:(NSDictionary *)map { - //TODO: - return nil; -} - --(NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { - switch (direction) { - case RTCRtpTransceiverDirectionSendRecv: - return @"sendrecv"; - case RTCRtpTransceiverDirectionSendOnly: - return @"sendonly"; - case RTCRtpTransceiverDirectionRecvOnly: - return @"recvonly"; - case RTCRtpTransceiverDirectionInactive: - return @"inactive"; - } - return nil; -} - --(NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { - NSString* mid = transceiver.mid? 
transceiver.mid : @""; - NSDictionary* params = @{ - @"transceiverId": mid, - @"mid": mid, - @"direction": [self transceiverDirectionString:transceiver.direction], - @"sender": [self rtpSenderToMap:transceiver.sender], - @"receiver": [self receiverToMap:transceiver.receiver] - }; - return params; -} - -@end diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/ios/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.h b/macos/Classes/FlutterRPScreenRecorder.h new file mode 120000 index 0000000000..a34a3193c9 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.h \ No newline at end of file diff --git a/macos/Classes/FlutterRPScreenRecorder.m b/macos/Classes/FlutterRPScreenRecorder.m new file mode 120000 index 0000000000..f4e4d34067 --- /dev/null +++ b/macos/Classes/FlutterRPScreenRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRPScreenRecorder.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h deleted file mode 100755 index c2e039f072..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import - -@interface RTCDataChannel (Flutter) -@property (nonatomic, strong) NSString *peerConnectionId; -@property (nonatomic, strong) NSNumber *flutterChannelId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCDataChannel) - - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(nonnull NSString *)label - config:(nonnull RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger; - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId; - - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(nonnull NSString *)data - type:(nonnull NSString *)type; - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.h b/macos/Classes/FlutterRTCDataChannel.h new file mode 120000 index 0000000000..ca751533c4 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m deleted file mode 100755 index 0f1295817b..0000000000 --- a/macos/Classes/FlutterRTCDataChannel.m +++ /dev/null @@ -1,165 +0,0 @@ -#import -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCPeerConnection.h" -#import - -@implementation RTCDataChannel (Flutter) - -- (NSString *)peerConnectionId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setPeerConnectionId:(NSString *)peerConnectionId -{ - objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink )eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSNumber *)flutterChannelId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- 
(void)setFlutterChannelId:(NSNumber *)flutterChannelId -{ - objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (RTCDataChannel) - --(void)createDataChannel:(nonnull NSString *)peerConnectionId - label:(NSString *)label - config:(RTCDataChannelConfiguration *)config - messenger:(NSObject*)messenger -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; - - if (nil != dataChannel) { - dataChannel.peerConnectionId = peerConnectionId; - NSNumber *dataChannelId = [NSNumber numberWithInteger:config.channelId]; - peerConnection.dataChannels[dataChannelId] = dataChannel; - dataChannel.flutterChannelId = dataChannelId; - dataChannel.delegate = self; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnectionId, [dataChannelId intValue]] - binaryMessenger:messenger]; - - dataChannel.eventChannel = eventChannel; - [eventChannel setStreamHandler:dataChannel]; - } -} - --(void)dataChannelClose:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - RTCDataChannel *dataChannel = dataChannels[dataChannelId]; - FlutterEventChannel *eventChannel = dataChannel.eventChannel; - [eventChannel setStreamHandler:nil]; - dataChannel.eventChannel = nil; - [dataChannel close]; - [dataChannels removeObjectForKey:dataChannelId]; -} - --(void)dataChannelSend:(nonnull NSString *)peerConnectionId - dataChannelId:(nonnull NSString *)dataChannelId - data:(id)data - type:(NSString *)type -{ - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId]; - - NSData *bytes = [type isEqualToString:@"binary"] ? - ((FlutterStandardTypedData*)data).data : - [data dataUsingEncoding:NSUTF8StringEncoding]; - - RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]]; - [dataChannel sendData:buffer]; -} - -- (NSString *)stringForDataChannelState:(RTCDataChannelState)state -{ - switch (state) { - case RTCDataChannelStateConnecting: return @"connecting"; - case RTCDataChannelStateOpen: return @"open"; - case RTCDataChannelStateClosing: return @"closing"; - case RTCDataChannelStateClosed: return @"closed"; - } - return nil; -} - -#pragma mark - RTCDataChannelDelegate methods - -// Called when the data channel state has changed. 
-- (void)dataChannelDidChangeState:(RTCDataChannel*)channel -{ - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelStateChanged", - @"id": channel.flutterChannelId, - @"state": [self stringForDataChannelState:channel.readyState]}); - } -} - -// Called when a data buffer was successfully received. -- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer -{ - NSString *type; - id data; - if (buffer.isBinary) { - type = @"binary"; - data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; - } else { - type = @"text"; - data = [[NSString alloc] initWithData:buffer.data - encoding:NSUTF8StringEncoding]; - } - RTCPeerConnection *peerConnection = self.peerConnections[channel.peerConnectionId]; - FlutterEventSink eventSink = channel.eventSink; - if(eventSink) { - eventSink(@{ @"event" : @"dataChannelReceiveMessage", - @"id": channel.flutterChannelId, - @"type": type, - @"data": (data ? data : [NSNull null])}); - } -} - -@end diff --git a/macos/Classes/FlutterRTCDataChannel.m b/macos/Classes/FlutterRTCDataChannel.m new file mode 120000 index 0000000000..2c6a822406 --- /dev/null +++ b/macos/Classes/FlutterRTCDataChannel.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCDataChannel.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h deleted file mode 100644 index 4b1e4d7a9f..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.h +++ /dev/null @@ -1,8 +0,0 @@ -#import -#import - -@interface FlutterRTCFrameCapturer : NSObject - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result; - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.h b/macos/Classes/FlutterRTCFrameCapturer.h new file mode 120000 index 0000000000..b732660b2f --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m deleted file mode 100644 index 4c89d41e78..0000000000 --- a/macos/Classes/FlutterRTCFrameCapturer.m +++ /dev/null @@ -1,81 +0,0 @@ -#import -#import "FlutterRTCFrameCapturer.h" - -#include "libyuv.h" - -@import CoreImage; -@import CoreVideo; - -@implementation FlutterRTCFrameCapturer { - RTCVideoTrack* _track; - NSString* _path; - FlutterResult _result; - bool _gotFrame; -} - -- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result -{ - self = [super init]; - if (self) { - _gotFrame = false; - _track = track; - _path = path; - _result = result; - [track addRenderer:self]; - } - return self; -} - -- (void)setSize:(CGSize)size -{ -} - -- (void)renderFrame:(nullable RTCVideoFrame *)frame -{ - if (_gotFrame || frame == nil) return; - _gotFrame = true; - - id buffer = frame.buffer; - CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; - - CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; - CIContext *context = [CIContext contextWithOptions:nil]; - CGImageRef cgImage = [context createCGImage:ciImage - fromRect:CGRectMake(0, 0, frame.width, frame.height)]; -#if 0 //TODO: frame capture - UIImageOrientation orientation; - switch (frame.rotation) { - case RTCVideoRotation_90: - orientation = UIImageOrientationRight; - 
break; - case RTCVideoRotation_180: - orientation = UIImageOrientationDown; - break; - case RTCVideoRotation_270: - orientation = UIImageOrientationLeft; - default: - orientation = UIImageOrientationUp; - break; - } - - UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation]; - CGImageRelease(cgImage); - NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f); - - if ([jpgData writeToFile:_path atomically:NO]) { - NSLog(@"File writed successfully to %@", _path); - _result(nil); - } else { - NSLog(@"Failed to write to file"); - _result([FlutterError errorWithCode:@"CaptureFrameFailed" - message:@"Failed to write JPEG data to file" - details:nil]); - } -#endif - dispatch_async(dispatch_get_main_queue(), ^{ - [self->_track removeRenderer:self]; - self->_track = nil; - }); -} - -@end diff --git a/macos/Classes/FlutterRTCFrameCapturer.m b/macos/Classes/FlutterRTCFrameCapturer.m new file mode 120000 index 0000000000..36b15d7c6a --- /dev/null +++ b/macos/Classes/FlutterRTCFrameCapturer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCFrameCapturer.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h deleted file mode 100644 index 255a472380..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.h +++ /dev/null @@ -1,20 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" - -@interface FlutterWebRTCPlugin (RTCMediaStream) - --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result; - --(void)getSources:(FlutterResult)result; - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track - result:(FlutterResult) result; - --(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track - toPath:(NSString *) path - result:(FlutterResult) result; -@end diff --git a/macos/Classes/FlutterRTCMediaStream.h b/macos/Classes/FlutterRTCMediaStream.h new file mode 120000 index 0000000000..a56c382c17 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m deleted file mode 100755 index 5b52c9b65b..0000000000 --- a/macos/Classes/FlutterRTCMediaStream.m +++ /dev/null @@ -1,557 +0,0 @@ -#import - -#import - -#import "FlutterRTCFrameCapturer.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCPeerConnection.h" - -@implementation AVCaptureDevice (Flutter) - -- (NSString*)positionString { - switch (self.position) { - case AVCaptureDevicePositionUnspecified: return @"unspecified"; - case AVCaptureDevicePositionBack: return @"back"; - case AVCaptureDevicePositionFront: return @"front"; - } - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCMediaStream) - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} - */ -typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage); - -/** - * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} - */ -typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream); - -- (RTCMediaConstraints *)defaultMediaStreamConstraints { - NSDictionary *mandatoryConstraints - = @{ @"minWidth" : @"1280", - @"minHeight" : @"720", - @"minFrameRate" : @"30" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - 
initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; -} - -/** - * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the audio-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCAudioTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCAudioTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. - */ -- (void)getUserAudio:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - NSString *trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack *audioTrack - = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; - - [mediaStream addAudioTrack:audioTrack]; - - successCallback(mediaStream); -} - -// TODO: Use RCTConvert for constraints ... --(void)getUserMedia:(NSDictionary *)constraints - result:(FlutterResult) result { - // Initialize RTCMediaStream with a unique label in order to allow multiple - // RTCMediaStream instances initialized by multiple getUserMedia calls to be - // added to 1 RTCPeerConnection instance. As suggested by - // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good - // practice, use a UUID (conforming to RFC4122). 
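
A note on FlutterRTCFrameCapturer.m a few hunks above: the JPEG-writing branch is compiled out with #if 0 because it relies on UIKit (UIImage, UIImageJPEGRepresentation), which is unavailable on macOS. A hypothetical AppKit-based equivalent, ignoring rotation handling, might look like the following sketch (not part of the patch):

#import <AppKit/AppKit.h>

static BOOL writeFrameAsJPEG(RTCVideoFrame *frame, NSString *path) {
  // Same CoreImage conversion the capturer already uses, then AppKit instead of UIKit for encoding.
  CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
  CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
  CIContext *context = [CIContext contextWithOptions:nil];
  CGImageRef cgImage = [context createCGImage:ciImage
                                     fromRect:CGRectMake(0, 0, frame.width, frame.height)];
  NSBitmapImageRep *rep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage];
  NSData *jpgData = [rep representationUsingType:NSBitmapImageFileTypeJPEG
                                      properties:@{NSImageCompressionFactor : @(0.9)}];
  CGImageRelease(cgImage);
  return [jpgData writeToFile:path atomically:NO];
}
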
- NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream - = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - [self - getUserMedia:constraints - successCallback:^ (RTCMediaStream *mediaStream) { - NSString *mediaStreamId = mediaStream.streamId; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in mediaStream.audioTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); - } - errorCallback:^ (NSString *errorType, NSString *errorMessage) { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] - message:errorMessage - details:nil]); - } - mediaStream:mediaStream]; -} - -/** - * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which - * satisfies specific constraints and adds it to a specific - * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track - * of the respective media type and the specified {@code constraints} specify - * that a track of the respective media type is required; otherwise, reports - * success for the specified {@code mediaStream} to a specific - * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media - * type-specific iteration of or successfully concludes the - * {@code getUserMedia()} algorithm. The method will be recursively invoked to - * conclude the whole {@code getUserMedia()} algorithm either with (successful) - * satisfaction of the specified {@code constraints} or with failure. - * - * @param constraints The {@code MediaStreamConstraints} which specifies the - * requested media types and which the new {@code RTCAudioTrack} or - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm. - */ -- (void)getUserMedia:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // If mediaStream contains no audioTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local audio content. 
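
The per-track dictionary built here (id, kind, label, enabled, remote, readyState) is repeated verbatim in several places across this patch; a small helper capturing that shape (illustrative only, not part of the patch) makes the contract with the Dart side easier to see:

static NSDictionary *trackToMap(RTCMediaStreamTrack *track, BOOL remote) {
  return @{ @"id"         : track.trackId,
            @"kind"       : track.kind,
            @"label"      : track.trackId,      // the patch reuses the track id as its label
            @"enabled"    : @(track.isEnabled),
            @"remote"     : @(remote),          // note: getUserMedia above reports YES even for local tracks
            @"readyState" : @"live" };
}
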
- if (mediaStream.audioTracks.count == 0) { - // constraints.audio - id audioConstraints = constraints[@"audio"]; - BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; - if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { - [self requestAccessForMediaType:AVMediaTypeAudio - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } - } - - // If mediaStream contains no videoTracks and the constraints request such a - // track, then run an iteration of the getUserMedia() algorithm to obtain - // local video content. - if (mediaStream.videoTracks.count == 0) { - // constraints.video - id videoConstraints = constraints[@"video"]; - if (videoConstraints) { - BOOL requestAccessForVideo - = [videoConstraints isKindOfClass:[NSNumber class]] - ? [videoConstraints boolValue] - : [videoConstraints isKindOfClass:[NSDictionary class]]; -#if !TARGET_IPHONE_SIMULATOR - if (requestAccessForVideo) { - [self requestAccessForMediaType:AVMediaTypeVideo - constraints:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - return; - } -#endif - } - } - - // There are audioTracks and/or videoTracks in mediaStream as requested by - // constraints so the getUserMedia() is to conclude with success. - successCallback(mediaStream); -} - -/** - * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, - * adds it to a specific {@link RTCMediaStream}, and reports success to a - * specific callback. Implements the video-specific counterpart of the - * {@code getUserMedia()} algorithm. - * - * @param constraints The {@code MediaStreamConstraints} which the new - * {@code RTCVideoTrack} instance is to satisfy. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is being initialized as - * part of the execution of the {@code getUserMedia()} algorithm, to which a - * new {@code RTCVideoTrack} is to be added, and which is to be reported to - * {@code successCallback} upon success. 
- */ -- (void)getUserVideo:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - id videoConstraints = constraints[@"video"]; - AVCaptureDevice *videoDevice; - if ([videoConstraints isKindOfClass:[NSDictionary class]]) { - // constraints.video.optional - id optionalVideoConstraints = videoConstraints[@"optional"]; - if (optionalVideoConstraints - && [optionalVideoConstraints isKindOfClass:[NSArray class]]) { - NSArray *options = optionalVideoConstraints; - for (id item in options) { - if ([item isKindOfClass:[NSDictionary class]]) { - NSString *sourceId = ((NSDictionary *)item)[@"sourceId"]; - if (sourceId) { - videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId]; - if (videoDevice) { - break; - } - } - } - } - } - if (!videoDevice) { - // constraints.video.facingMode - // - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - id facingMode = videoConstraints[@"facingMode"]; - if (facingMode && [facingMode isKindOfClass:[NSString class]]) { - AVCaptureDevicePosition position; - if ([facingMode isEqualToString:@"environment"]) { - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionBack; - } else if ([facingMode isEqualToString:@"user"]) { - self._usingFrontCamera = YES; - position = AVCaptureDevicePositionFront; - } else { - // If the specified facingMode value is not supported, fall back to - // the default video device. - self._usingFrontCamera = NO; - position = AVCaptureDevicePositionUnspecified; - } - videoDevice = [self findDeviceForPosition:position]; - } - } - if (!videoDevice) { - videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - } - - //TODO(rostopira): refactor to separate function and add support for max - - self._targetWidth = 1280; - self._targetHeight = 720; - self._targetFps = 30; - - id mandatory = videoConstraints[@"mandatory"]; - // constraints.video.mandatory - if(mandatory && [mandatory isKindOfClass:[NSDictionary class]]) - { - id widthConstraint = mandatory[@"minWidth"]; - if ([widthConstraint isKindOfClass:[NSString class]]) { - int possibleWidth = [widthConstraint intValue]; - if (possibleWidth != 0) { - self._targetWidth = possibleWidth; - } - } - id heightConstraint = mandatory[@"minHeight"]; - if ([heightConstraint isKindOfClass:[NSString class]]) { - int possibleHeight = [heightConstraint intValue]; - if (possibleHeight != 0) { - self._targetHeight = possibleHeight; - } - } - id fpsConstraint = mandatory[@"minFrameRate"]; - if ([fpsConstraint isKindOfClass:[NSString class]]) { - int possibleFps = [fpsConstraint intValue]; - if (possibleFps != 0) { - self._targetFps = possibleFps; - } - } - } - - if (videoDevice) { - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - if (self.videoCapturer) { - [self.videoCapturer stopCapture]; - } - self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - NSInteger selectedFps = [self selectFpsForFormat:selectedFormat]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:selectedFps completionHandler:^(NSError *error) { - if (error) { - NSLog(@"Start capture error: %@", [error localizedDescription]); - } - }]; - - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - successCallback(mediaStream); - } else { - // According to step 6.2.3 of the getUserMedia() algorithm, if there is no - // source, fail with a new OverconstrainedError. - errorCallback(@"OverconstrainedError", /* errorMessage */ nil); - } -} - --(void)mediaStreamRelease:(RTCMediaStream *)stream -{ - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:stream.streamId]; - } -} - - -/** - * Obtains local media content of a specific type. Requests access for the - * specified {@code mediaType} if necessary. In other words, implements a media - * type-specific iteration of the {@code getUserMedia()} algorithm. - * - * @param mediaType Either {@link AVMediaTypAudio} or {@link AVMediaTypeVideo} - * which specifies the type of the local media content to obtain. - * @param constraints The {@code MediaStreamConstraints} which are to be - * satisfied by the obtained local media content. - * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which - * success is to be reported. - * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which - * failure is to be reported. - * @param mediaStream The {@link RTCMediaStream} which is to collect the - * obtained local media content of the specified {@code mediaType}. - */ -- (void)requestAccessForMediaType:(NSString *)mediaType - constraints:(NSDictionary *)constraints - successCallback:(NavigatorUserMediaSuccessCallback)successCallback - errorCallback:(NavigatorUserMediaErrorCallback)errorCallback - mediaStream:(RTCMediaStream *)mediaStream { - // According to step 6.2.1 of the getUserMedia() algorithm, if there is no - // source, fail "with a new DOMException object whose name attribute has the - // value NotFoundError." - // XXX The following approach does not work for audio in Simulator. That is - // because audio capture is done using AVAudioSession which does not use - // AVCaptureDevice there. Anyway, Simulator will not (visually) request access - // for audio. - if (mediaType == AVMediaTypeVideo - && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { - // Since successCallback and errorCallback are asynchronously invoked - // elsewhere, make sure that the invocation here is consistent. 
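
The video branch of getUserMedia above ultimately reduces to choosing a device, a format and a frame rate, then handing them to the shared RTCCameraVideoCapturer. Condensed, and using the helper methods defined later in this file, the sequence inside the plugin category is roughly:

AVCaptureDevice *device = [self findDeviceForPosition:AVCaptureDevicePositionFront];
AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
NSInteger fps = [self selectFpsForFormat:format];
[self.videoCapturer startCaptureWithDevice:device
                                    format:format
                                       fps:fps
                          completionHandler:^(NSError *error) {
  if (error) {
    NSLog(@"Start capture error: %@", error.localizedDescription);
  }
}];
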
- dispatch_async(dispatch_get_main_queue(), ^ { - errorCallback(@"DOMException", @"NotFoundError"); - }); - return; - } - - [AVCaptureDevice - requestAccessForMediaType:mediaType - completionHandler:^ (BOOL granted) { - dispatch_async(dispatch_get_main_queue(), ^ { - if (granted) { - NavigatorUserMediaSuccessCallback scb - = ^ (RTCMediaStream *mediaStream) { - [self getUserMedia:constraints - successCallback:successCallback - errorCallback:errorCallback - mediaStream:mediaStream]; - }; - - if (mediaType == AVMediaTypeAudio) { - [self getUserAudio:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } else if (mediaType == AVMediaTypeVideo) { - [self getUserVideo:constraints - successCallback:scb - errorCallback:errorCallback - mediaStream:mediaStream]; - } - } else { - // According to step 10 Permission Failure of the getUserMedia() - // algorithm, if the user has denied permission, fail "with a new - // DOMException object whose name attribute has the value - // NotAllowedError." - errorCallback(@"DOMException", @"NotAllowedError"); - } - }); - }]; -} - --(void)getDisplayMedia:(NSDictionary *)constraints - result:(FlutterResult)result { - NSString *mediaStreamId = [[NSUUID UUID] UUIDString]; - RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; - - RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource]; - /* TODO: scree capture - FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; - - [screenCapturer startCapture]; - - //TODO: - self.videoCapturer = screenCapturer; - */ - NSString *trackUUID = [[NSUUID UUID] UUIDString]; - RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - [mediaStream addVideoTrack:videoTrack]; - - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCVideoTrack *track in mediaStream.videoTracks) { - [self.localTracks setObject:track forKey:track.trackId]; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - self.localStreams[mediaStreamId] = mediaStream; - result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks }); -} - --(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } - result(@{@"sources": sources}); -} - --(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track -{ - // what's different to mediaStreamTrackStop? only call mediaStream explicitly? 
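
A note on getDisplayMedia above: in this patch it returns a video track whose RTCVideoSource has no capturer attached, because the screen-capture wiring is still a commented-out TODO. Once the FlutterRPScreenRecorder symlinked into macos/Classes earlier in this patch is hooked up, the missing piece would roughly be the following (a sketch based on the commented code, not a working implementation):

FlutterRPScreenRecorder *screenCapturer =
    [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
[screenCapturer startCapture];
// The plugin header (later in this patch) declares videoCapturer as RTCCameraVideoCapturer,
// so the property type, or a dedicated screen-capture property, still needs sorting out.
self.videoCapturer = screenCapturer;
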
- if (mediaStream && track) { - track.isEnabled = NO; - // FIXME this is called when track is removed from the MediaStream, - // but it doesn't mean it can not be added back using MediaStream.addTrack - //TODO: [self.localTracks removeObjectForKey:trackID]; - if ([track.kind isEqualToString:@"audio"]) { - [mediaStream removeAudioTrack:(RTCAudioTrack *)track]; - } else if([track.kind isEqualToString:@"video"]) { - [mediaStream removeVideoTrack:(RTCVideoTrack *)track]; - } - } -} - --(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled -{ - if (track && track.isEnabled != enabled) { - track.isEnabled = enabled; - } -} - --(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice *videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice]; - [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat] completionHandler:^(NSError* error){ - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" message:@"Error while switching camera" details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - --(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result -{ - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't capture frame."); - return; - } - - FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result]; -} - --(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track -{ - if (track) { - track.isEnabled = NO; - [self.localTracks removeObjectForKey:track.trackId]; - } -} - -- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - if (position == AVCaptureDevicePositionUnspecified) { - return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; - } - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; - for (AVCaptureDevice *device in captureDevices) { - if (device.position == position) { - return device; - } - } - return captureDevices[0]; -} - -- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { - NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; - AVCaptureDeviceFormat *selectedFormat = nil; - int currentDiff = INT_MAX; - for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(self._targetWidth - dimension.width) + abs(self._targetHeight - dimension.height); - if (diff < currentDiff) { - selectedFormat = format; - currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { - selectedFormat = format; - } - } - return selectedFormat; -} - -- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format { - Float64 maxSupportedFramerate = 0; - for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) { - maxSupportedFramerate = 
fmax(maxSupportedFramerate, fpsRange.maxFrameRate); - } - return fmin(maxSupportedFramerate, self._targetFps); -} - -@end diff --git a/macos/Classes/FlutterRTCMediaStream.m b/macos/Classes/FlutterRTCMediaStream.m new file mode 120000 index 0000000000..2e988ad614 --- /dev/null +++ b/macos/Classes/FlutterRTCMediaStream.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaStream.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h deleted file mode 100755 index b99f885b0a..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.h +++ /dev/null @@ -1,43 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface RTCPeerConnection (Flutter) -@property (nonatomic, strong) NSMutableDictionary *dataChannels; -@property (nonatomic, strong) NSMutableDictionary *remoteStreams; -@property (nonatomic, strong) NSMutableDictionary *remoteTracks; -@property (nonatomic, strong) NSString *flutterId; -@property (nonatomic, strong) FlutterEventSink eventSink; -@property (nonatomic, strong) FlutterEventChannel* eventChannel; -@end - -@interface FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result; - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result; - --(RTCMediaConstraints *) parseMediaConstraints:(nonnull NSDictionary *)constraints; - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection; - -@end diff --git a/macos/Classes/FlutterRTCPeerConnection.h b/macos/Classes/FlutterRTCPeerConnection.h new file mode 120000 index 0000000000..c4907a3db8 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m deleted file mode 100755 index 6517f85ffe..0000000000 --- a/macos/Classes/FlutterRTCPeerConnection.m +++ /dev/null @@ -1,504 +0,0 @@ -#import -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCDataChannel.h" - -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import -#import - -@implementation RTCPeerConnection (Flutter) - -@dynamic eventSink; - -- (NSString *)flutterId -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setFlutterId:(NSString *)flutterId -{ - objc_setAssociatedObject(self, @selector(flutterId), flutterId, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventSink)eventSink -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventSink:(FlutterEventSink)eventSink -{ - objc_setAssociatedObject(self, 
@selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (FlutterEventChannel *)eventChannel -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setEventChannel:(FlutterEventChannel *)eventChannel -{ - objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)dataChannels -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setDataChannels:(NSMutableDictionary *)dataChannels -{ - objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteStreams -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteStreams:(NSMutableDictionary *)remoteStreams -{ - objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -- (NSMutableDictionary *)remoteTracks -{ - return objc_getAssociatedObject(self, _cmd); -} - -- (void)setRemoteTracks:(NSMutableDictionary *)remoteTracks -{ - objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC); -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - self.eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - self.eventSink = sink; - return nil; -} - -@end - -@implementation FlutterWebRTCPlugin (RTCPeerConnection) - --(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration - peerConnection:(RTCPeerConnection*)peerConnection -{ - [peerConnection setConfiguration:configuration]; -} - --(void) peerConnectionCreateOffer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection*)peerConnection - result:(FlutterResult)result -{ - [peerConnection - offerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateOfferFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionCreateAnswer:(NSDictionary *)constraints - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection - answerForConstraints:[self parseMediaConstraints:constraints] - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"CreateAnswerFailed" - message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] - details:nil]); - } else { - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } - }]; -} - --(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection setLocalDescription:sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetLocalDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp - peerConnection:(RTCPeerConnection *)peerConnection - 
result:(FlutterResult)result -{ - [peerConnection setRemoteDescription: sdp completionHandler: ^(NSError *error) { - if (error) { - result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed" - message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] - details:nil]); - } else { - result(nil); - } - }]; -} - --(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate - peerConnection:(RTCPeerConnection *)peerConnection - result:(FlutterResult)result -{ - [peerConnection addIceCandidate:candidate]; - result(nil); - //NSLog(@"addICECandidateresult: %@", candidate); -} - --(void) peerConnectionClose:(RTCPeerConnection *)peerConnection -{ - [peerConnection close]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels - = peerConnection.dataChannels; - for (NSString *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. - } - [dataChannels removeAllObjects]; -} - --(void) peerConnectionGetStats:(nonnull NSString *)trackID - peerConnection:(nonnull RTCPeerConnection *)peerConnection - result:(nonnull FlutterResult)result -{ - RTCMediaStreamTrack *track = nil; - if (!trackID - || !trackID.length - || (track = self.localTracks[trackID]) - || (track = peerConnection.remoteTracks[trackID])) { - [peerConnection statsForTrack:track - statsOutputLevel:RTCStatsOutputLevelStandard - completionHandler:^(NSArray *reports) { - - NSMutableArray *stats = [NSMutableArray array]; - - for (RTCLegacyStatsReport *report in reports) { - [stats addObject:@{@"id": report.reportId, - @"type": report.type, - @"timestamp": @(report.timestamp), - @"values": report.values - }]; - } - - result(@{@"stats": stats}); - }]; - }else{ - result([FlutterError errorWithCode:@"GetStatsFailed" - message:[NSString stringWithFormat:@"Error %@", @""] - details:nil]); - } -} - -- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state { - switch (state) { - case RTCIceConnectionStateNew: return @"new"; - case RTCIceConnectionStateChecking: return @"checking"; - case RTCIceConnectionStateConnected: return @"connected"; - case RTCIceConnectionStateCompleted: return @"completed"; - case RTCIceConnectionStateFailed: return @"failed"; - case RTCIceConnectionStateDisconnected: return @"disconnected"; - case RTCIceConnectionStateClosed: return @"closed"; - case RTCIceConnectionStateCount: return @"count"; - } - return nil; -} - -- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state { - switch (state) { - case RTCIceGatheringStateNew: return @"new"; - case RTCIceGatheringStateGathering: return @"gathering"; - case RTCIceGatheringStateComplete: return @"complete"; - } - return nil; -} - -- (NSString *)stringForSignalingState:(RTCSignalingState)state { - switch (state) { - case RTCSignalingStateStable: return @"stable"; - case RTCSignalingStateHaveLocalOffer: return @"have-local-offer"; - case RTCSignalingStateHaveLocalPrAnswer: return @"have-local-pranswer"; - case RTCSignalingStateHaveRemoteOffer: return @"have-remote-offer"; - case RTCSignalingStateHaveRemotePrAnswer: return @"have-remote-pranswer"; - case RTCSignalingStateClosed: return @"closed"; - } - return nil; -} - - -/** - * Parses the constraint keys and values of a specific JavaScript 
object into - * a specific NSMutableDictionary in a format suitable for the - * initialization of a RTCMediaConstraints instance. - * - * @param src The JavaScript object which defines constraint keys and values and - * which is to be parsed into the specified dst. - * @param dst The NSMutableDictionary into which the constraint keys - * and values defined by src are to be written in a format suitable for - * the initialization of a RTCMediaConstraints instance. - */ -- (void)parseJavaScriptConstraints:(NSDictionary *)src - intoWebRTCConstraints:(NSMutableDictionary *)dst { - for (id srcKey in src) { - id srcValue = src[srcKey]; - NSString *dstValue; - - if ([srcValue isKindOfClass:[NSNumber class]]) { - dstValue = [srcValue boolValue] ? @"true" : @"false"; - } else { - dstValue = [srcValue description]; - } - dst[[srcKey description]] = dstValue; - } -} - -/** - * Parses a JavaScript object into a new RTCMediaConstraints instance. - * - * @param constraints The JavaScript object to parse into a new - * RTCMediaConstraints instance. - * @returns A new RTCMediaConstraints instance initialized with the - * mandatory and optional constraint keys and values specified by - * constraints. - */ -- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints { - id mandatory = constraints[@"mandatory"]; - NSMutableDictionary *mandatory_ - = [NSMutableDictionary new]; - - if ([mandatory isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)mandatory - intoWebRTCConstraints:mandatory_]; - } - - id optional = constraints[@"optional"]; - NSMutableDictionary *optional_ - = [NSMutableDictionary new]; - - if ([optional isKindOfClass:[NSArray class]]) { - for (id o in (NSArray *)optional) { - if ([o isKindOfClass:[NSDictionary class]]) { - [self parseJavaScriptConstraints:(NSDictionary *)o - intoWebRTCConstraints:optional_]; - } - } - } - - return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ - optionalConstraints:optional_]; -} - -#pragma mark - RTCPeerConnectionDelegate methods - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"signalingState", - @"state" : [self stringForSignalingState:newState]}); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{ - - peerConnection.remoteTracks[track.trackId] = track; - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - @"remote": @(YES), - @"readyState": @"live"} - }); - } -} - --(void)peerConnection:(RTCPeerConnection *)peerConnection - mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{ - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - NSString *streamId = stream.streamId; - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveTrack", - @"streamId": streamId, - @"trackId": track.trackId, - @"track": @{ - @"id": track.trackId, - @"kind": track.kind, - @"label": track.trackId, - @"enabled": @(track.isEnabled), - 
@"remote": @(YES), - @"readyState": @"live"} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream { - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCAudioTrack *track in stream.audioTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - for (RTCVideoTrack *track in stream.videoTracks) { - peerConnection.remoteTracks[track.trackId] = track; - [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}]; - } - - NSString *streamId = stream.streamId; - peerConnection.remoteStreams[streamId] = stream; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onAddStream", - @"streamId": streamId, - @"audioTracks": audioTracks, - @"videoTracks": videoTracks, - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream { - NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; - // We assume there can be only one object for 1 key - if (keysArray.count > 1) { - NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId); - } - NSString *streamId = stream.streamId; - - for (RTCVideoTrack *track in stream.videoTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - for (RTCAudioTrack *track in stream.audioTracks) { - [peerConnection.remoteTracks removeObjectForKey:track.trackId]; - } - [peerConnection.remoteStreams removeObjectForKey:streamId]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onRemoveStream", - @"streamId": streamId, - }); - } -} - -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{@"event" : @"onRenegotiationNeeded",}); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceConnectionState", - @"state" : [self stringForICEConnectionState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"iceGatheringState", - @"state" : [self stringForICEGatheringState:newState] - }); - } -} - -- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate { - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"onCandidate", - @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid} - }); - } -} - -- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel { - if (-1 == dataChannel.channelId) { - return; - } - - NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; - dataChannel.peerConnectionId = 
peerConnection.flutterId; - dataChannel.delegate = self; - peerConnection.dataChannels[dataChannelId] = dataChannel; - - FlutterEventChannel *eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId] - binaryMessenger:self.messenger]; - - dataChannel.eventChannel = eventChannel; - dataChannel.flutterChannelId = dataChannelId; - [eventChannel setStreamHandler:dataChannel]; - - FlutterEventSink eventSink = peerConnection.eventSink; - if(eventSink){ - eventSink(@{ - @"event" : @"didOpenDataChannel", - @"id": dataChannelId, - @"label": dataChannel.label - }); - } -} - -@end - diff --git a/macos/Classes/FlutterRTCPeerConnection.m b/macos/Classes/FlutterRTCPeerConnection.m new file mode 120000 index 0000000000..363aecf0c7 --- /dev/null +++ b/macos/Classes/FlutterRTCPeerConnection.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCPeerConnection.m \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h deleted file mode 100755 index 7fdc81b253..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.h +++ /dev/null @@ -1,28 +0,0 @@ -#import "FlutterWebRTCPlugin.h" - -@interface FlutterRTCVideoRenderer : NSObject - -/** - * The {@link RTCVideoTrack}, if any, which this instance renders. - */ -@property (nonatomic, strong) RTCVideoTrack *videoTrack; -@property (nonatomic) int64_t textureId; -@property (nonatomic, weak) id registry; -@property (nonatomic, strong) FlutterEventSink eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - -- (void)dispose; - -@end - - -@interface FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger; - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterRTCVideoRenderer.h b/macos/Classes/FlutterRTCVideoRenderer.h new file mode 120000 index 0000000000..2e68777e02 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.h \ No newline at end of file diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m deleted file mode 100755 index 928051960d..0000000000 --- a/macos/Classes/FlutterRTCVideoRenderer.m +++ /dev/null @@ -1,294 +0,0 @@ -#import "FlutterRTCVideoRenderer.h" -#import "FlutterWebRTCPlugin.h" - -#import -#import -#import -#import -#include "libyuv.h" - -@implementation FlutterRTCVideoRenderer { - CGSize _frameSize; - CGSize _renderSize; - CVPixelBufferRef _pixelBufferRef; - RTCVideoRotation _rotation; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; -} - -@synthesize textureId = _textureId; -@synthesize registry = _registry; -@synthesize eventSink = _eventSink; - -- (instancetype)initWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - self = [super init]; - if (self){ - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _registry = registry; - _pixelBufferRef = nil; - _eventSink = nil; - _rotation = -1; - _textureId = [registry registerTexture:self]; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] - 
binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - return self; -} - --(void)dealloc { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } -} - -- (CVPixelBufferRef)copyPixelBuffer:(size_t)width height:(size_t)height { - if(_pixelBufferRef != nil) { - RTCCVPixelBuffer *rtcPixelbuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:_pixelBufferRef]; - CVPixelBufferRef outbuffer; - CVPixelBufferCreate(kCFAllocatorDefault, - width, height, - kCVPixelFormatType_32BGRA, - nil, &outbuffer); - - [rtcPixelbuffer cropAndScaleTo:outbuffer withTempBuffer:CVPixelBufferGetBaseAddress(outbuffer)]; - return outbuffer; - } - return nil; -} - -- (CVPixelBufferRef)copyPixelBuffer { - if(_pixelBufferRef != nil){ - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; - } - return nil; -} - --(void)dispose{ - [_registry unregisterTexture:_textureId]; -} - -- (void)setVideoTrack:(RTCVideoTrack *)videoTrack { - RTCVideoTrack *oldValue = self.videoTrack; - - if (oldValue != videoTrack) { - _isFirstFrameRendered = false; - if (oldValue) { - [oldValue removeRenderer:self]; - } - _videoTrack = videoTrack; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - if (videoTrack) { - [videoTrack addRenderer:self]; - } - } -} - - --(id) correctRotation:(const id) src - withRotation:(RTCVideoRotation) rotation -{ - - int rotated_width = src.width; - int rotated_height = src.height; - - if (rotation == RTCVideoRotation_90 || - rotation == RTCVideoRotation_270) { - int temp = rotated_width; - rotated_width = rotated_height; - rotated_height = temp; - } - - id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width height:rotated_height]; - - I420Rotate(src.dataY, src.strideY, - src.dataU, src.strideU, - src.dataV, src.strideV, - (uint8_t*)buffer.dataY, buffer.strideY, - (uint8_t*)buffer.dataU,buffer.strideU, - (uint8_t*)buffer.dataV, buffer.strideV, - src.width, src.height, - (RotationModeEnum)rotation); - - return buffer; -} - --(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer withFrame:(RTCVideoFrame *) frame -{ - id i420Buffer = [self correctRotation:[frame.buffer toI420] withRotation:frame.rotation]; - CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); - - const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); - if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // NV12 - uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); - const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); - uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); - const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); - - I420ToNV12(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dstY, - (int)dstYStride, - dstUV, - (int)dstUVStride, - i420Buffer.width, - i420Buffer.height); - } else { - uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); - const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); - - if (pixelFormat == kCVPixelFormatType_32BGRA) { - // Corresponds to libyuv::FOURCC_ARGB - I420ToARGB(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } else if (pixelFormat == kCVPixelFormatType_32ARGB) { - // 
Corresponds to libyuv::FOURCC_BGRA - I420ToBGRA(i420Buffer.dataY, - i420Buffer.strideY, - i420Buffer.dataU, - i420Buffer.strideU, - i420Buffer.dataV, - i420Buffer.strideV, - dst, - (int)bytesPerRow, - i420Buffer.width, - i420Buffer.height); - } - } - - CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); -} - -#pragma mark - RTCVideoRenderer methods -- (void)renderFrame:(RTCVideoFrame *)frame { - - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; - - __weak FlutterRTCVideoRenderer *weakSelf = self; - if(_renderSize.width != frame.width || _renderSize.height != frame.height){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeVideoSize", - @"id": @(strongSelf.textureId), - @"width": @(frame.width), - @"height": @(frame.height), - }); - } - }); - _renderSize = CGSizeMake(frame.width, frame.height); - } - - if(frame.rotation != _rotation){ - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - if(strongSelf.eventSink){ - strongSelf.eventSink(@{ - @"event" : @"didTextureChangeRotation", - @"id": @(strongSelf.textureId), - @"rotation": @(frame.rotation), - }); - } - }); - - _rotation = frame.rotation; - } - - //Notify the Flutter new pixelBufferRef to be ready. - dispatch_async(dispatch_get_main_queue(), ^{ - FlutterRTCVideoRenderer *strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; - if (!strongSelf->_isFirstFrameRendered) { - if (strongSelf.eventSink) { - strongSelf.eventSink(@{@"event":@"didFirstFrameRendered"}); - strongSelf->_isFirstFrameRendered = true; - } - } - }); -} - -/** - * Sets the size of the video frame to render. - * - * @param size The size of the video frame to render. - */ -- (void)setSize:(CGSize)size { - if(_pixelBufferRef == nil || (size.width != _frameSize.width || size.height != _frameSize.height)) - { - if(_pixelBufferRef){ - CVBufferRelease(_pixelBufferRef); - } - NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVPixelBufferCreate(kCFAllocatorDefault, - size.width, size.height, - kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef); - - _frameSize = size; - } -} - -#pragma mark - FlutterStreamHandler methods - -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} -@end - -@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager) - -- (FlutterRTCVideoRenderer *)createWithTextureRegistry:(id)registry - messenger:(NSObject*)messenger{ - return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger]; -} - --(void)setStreamId:(NSString*)streamId view:(FlutterRTCVideoRenderer*)view peerConnectionId:(NSString *)peerConnectionId{ - - RTCVideoTrack *videoTrack; - RTCMediaStream *stream = [self streamForId:streamId peerConnectionId:peerConnectionId]; - if(stream){ - NSArray *videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - if (!videoTrack) { - NSLog(@"No video track for RTCMediaStream: %@", streamId); - } - } else { - videoTrack = nil; - } - - view.videoTrack = videoTrack; -} - -@end - diff --git a/macos/Classes/FlutterRTCVideoRenderer.m b/macos/Classes/FlutterRTCVideoRenderer.m new file mode 120000 index 0000000000..77a0efd6d2 --- /dev/null +++ b/macos/Classes/FlutterRTCVideoRenderer.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCVideoRenderer.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h deleted file mode 100644 index 993169ea64..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.h +++ /dev/null @@ -1,23 +0,0 @@ -#import -#import -#import - -@class FlutterRTCVideoRenderer; - -@interface FlutterWebRTCPlugin : NSObject - -@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory; -@property (nonatomic, strong) NSMutableDictionary *peerConnections; -@property (nonatomic, strong) NSMutableDictionary *localStreams; -@property (nonatomic, strong) NSMutableDictionary *localTracks; -@property (nonatomic, strong) NSMutableDictionary *renders; -@property (nonatomic, strong) NSObject* messenger; -@property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; -@property (nonatomic) BOOL _usingFrontCamera; -@property (nonatomic) int _targetWidth; -@property (nonatomic) int _targetHeight; -@property (nonatomic) int _targetFps; - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId; - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.h b/macos/Classes/FlutterWebRTCPlugin.h new file mode 120000 index 0000000000..b8713b38ef --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.h \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m deleted file mode 100644 index bfbe4535aa..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.m +++ /dev/null @@ -1,721 +0,0 @@ -#import "FlutterWebRTCPlugin.h" -#import "FlutterRTCPeerConnection.h" -#import "FlutterRTCMediaStream.h" -#import "FlutterRTCDataChannel.h" -#import "FlutterRTCVideoRenderer.h" - -#import - -@implementation FlutterWebRTCPlugin { - FlutterMethodChannel *_methodChannel; - id _registry; - id _messenger; - id _textures; -} - -@synthesize messenger = _messenger; - -+ (void)registerWithRegistrar:(NSObject*)registrar { - - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"FlutterWebRTC.Method" - binaryMessenger:[registrar messenger]]; - FlutterWebRTCPlugin* instance = [[FlutterWebRTCPlugin alloc] initWithChannel:channel - registrar:registrar - messenger:[registrar messenger] - withTextures:[registrar textures]]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel - registrar:(NSObject*)registrar - messenger:(NSObject*)messenger - withTextures:(NSObject *)textures{ - - self = [super init]; - - if (self) { - _methodChannel = channel; - _registry = registrar; - _textures = textures; - _messenger = messenger; - } - - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] - initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; - - - self.peerConnections = 
[NSMutableDictionary new]; - self.localStreams = [NSMutableDictionary new]; - self.localTracks = [NSMutableDictionary new]; - self.renders = [[NSMutableDictionary alloc] init]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result { - - if ([@"createPeerConnection" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* configuration = argsMap[@"configuration"]; - NSDictionary* constraints = argsMap[@"constraints"]; - - RTCPeerConnection *peerConnection = [self.peerConnectionFactory - peerConnectionWithConfiguration:[self RTCConfiguration:configuration] - constraints:[self parseMediaConstraints:constraints] - delegate:self]; - - peerConnection.remoteStreams = [NSMutableDictionary new]; - peerConnection.remoteTracks = [NSMutableDictionary new]; - peerConnection.dataChannels = [NSMutableDictionary new]; - - NSString *peerConnectionId = [[NSUUID UUID] UUIDString]; - peerConnection.flutterId = peerConnectionId; - - /*Create Event Channel.*/ - peerConnection.eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectoinEvent%@", peerConnectionId] - binaryMessenger:_messenger]; - [peerConnection.eventChannel setStreamHandler:peerConnection]; - - self.peerConnections[peerConnectionId] = peerConnection; - result(@{ @"peerConnectionId" : peerConnectionId}); - } else if ([@"getUserMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getUserMedia:constraints result:result]; - } else if ([@"getDisplayMedia" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - [self getDisplayMedia:constraints result:result]; - } else if ([@"getSources" isEqualToString:call.method]) { - [self getSources:result]; - } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - [self mediaStreamGetTracks:streamId result:result]; - } else if ([@"createOffer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary* constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result ]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"createAnswer" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSDictionary * constraints = argsMap[@"constraints"]; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - { - [self peerConnectionCreateAnswer:constraints - peerConnection:peerConnection - result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = 
self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection addStream:stream]; - result(@""); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"removeStream" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - - NSString* streamId = ((NSString*)argsMap[@"streamId"]); - RTCMediaStream *stream = self.localStreams[streamId]; - - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection && stream){ - [peerConnection removeStream:stream]; - result(nil); - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"] - details:nil]); - } - } else if ([@"captureFrame" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* path = argsMap[@"path"]; - NSString* trackId = argsMap[@"trackId"]; - - RTCMediaStreamTrack *track = [self trackForId: trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - if(peerConnection) - { - [self peerConnectionSetLocalDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - NSDictionary *descriptionMap = argsMap[@"description"]; - NSString* sdp = descriptionMap[@"sdp"]; - RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; - RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType sdp:sdp]; - - if(peerConnection) - { - [self peerConnectionSetRemoteDescription:description peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - 
} else if ([@"sendDtmf" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* tone = argsMap[@"tone"]; - int duration = ((NSNumber*)argsMap[@"duration"]).intValue; - int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - - RTCRtpSender* audioSender = nil ; - for( RTCRtpSender *rtpSender in peerConnection.senders){ - if([[[rtpSender track] kind] isEqualToString:@"audio"]) { - audioSender = rtpSender; - } - } - if(audioSender){ - NSOperationQueue *queue = [[NSOperationQueue alloc] init]; - [queue addOperationWithBlock:^{ - double durationMs = duration / 1000.0; - double interToneGapMs = interToneGap / 1000.0; - [audioSender.dtmfSender insertDtmf :(NSString *)tone - duration:(NSTimeInterval) durationMs interToneGap:(NSTimeInterval)interToneGapMs]; - NSLog(@"DTMF Tone played "); - }]; - } - - result(@{@"result": @"success"}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"addCandidate" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSDictionary* candMap = argsMap[@"candidate"]; - NSString *sdp = candMap[@"candidate"]; - int sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; - NSString *sdpMid = candMap[@"sdpMid"]; - - RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:sdpMLineIndex sdpMid:sdpMid]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - - if(peerConnection) - { - [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; - }else{ - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getStats" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* trackId = argsMap[@"trackId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) - return [self peerConnectionGetStats:trackId peerConnection:peerConnection result:result]; - result(nil); - } else if ([@"createDataChannel" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* label = argsMap[@"label"]; - NSDictionary * dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; - [self createDataChannel:peerConnectionId - label:label - config:[self RTCDataChannelConfiguration:dataChannelDict] - messenger:_messenger]; - result(nil); - } else if ([@"dataChannelSend" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* dataChannelId = argsMap[@"dataChannelId"]; - NSString* type = argsMap[@"type"]; - id data = argsMap[@"data"]; - - [self dataChannelSend:peerConnectionId - dataChannelId:dataChannelId - data:data - type:type]; - result(nil); - } else if ([@"dataChannelClose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - NSString* 
dataChannelId = argsMap[@"dataChannelId"]; - [self dataChannelClose:peerConnectionId - dataChannelId:dataChannelId]; - result(nil); - } else if ([@"streamDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* streamId = argsMap[@"streamId"]; - RTCMediaStream *stream = self.localStreams[streamId]; - if (stream) { - for (RTCVideoTrack *track in stream.videoTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - RTCVideoSource *source = videoTrack.source; - if(source){ - [self.videoCapturer stopCapture]; - self.videoCapturer = nil; - } - } - for (RTCAudioTrack *track in stream.audioTracks) { - [self.localTracks removeObjectForKey:track.trackId]; - } - [self.localStreams removeObjectForKey:streamId]; - } - result(nil); - } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* enabled = argsMap[@"enabled"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if(track != nil){ - track.isEnabled = enabled.boolValue; - } - result(nil); - } else if ([@"trackDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - [self.localTracks removeObjectForKey:trackId]; - result(nil); - } else if ([@"peerConnectionClose" isEqualToString:call.method] || [@"peerConnectionDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if (peerConnection) { - [peerConnection close]; - [self.peerConnections removeObjectForKey:peerConnectionId]; - - // Clean up peerConnection's streams and tracks - [peerConnection.remoteStreams removeAllObjects]; - [peerConnection.remoteTracks removeAllObjects]; - - // Clean up peerConnection's dataChannels. - NSMutableDictionary *dataChannels = peerConnection.dataChannels; - for (NSNumber *dataChannelId in dataChannels) { - dataChannels[dataChannelId].delegate = nil; - // There is no need to close the RTCDataChannel because it is owned by the - // RTCPeerConnection and the latter will close the former. 
- } - [dataChannels removeAllObjects]; - } - result(nil); - } else if ([@"createVideoRenderer" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures - messenger:_messenger]; - self.renders[@(render.textureId)] = render; - result(@{@"textureId": @(render.textureId)}); - } else if ([@"videoRendererDispose" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; - result(nil); - } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber *textureId = argsMap[@"textureId"]; - FlutterRTCVideoRenderer *render = self.renders[textureId]; - NSString *streamId = argsMap[@"streamId"]; - NSString *peerConnectionId = argsMap[@"peerConnectionId"]; - if(render){ - [self setStreamId:streamId view:render peerConnectionId:peerConnectionId]; - } - result(nil); - } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack *videoTrack = (RTCVideoTrack *)track; - [self mediaStreamTrackSwitchCamera:videoTrack result:result]; - } else { - if (track == nil) { - result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); - } else { - result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]); - } - } - } else if ([@"setVolume" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* volume = argsMap[@"volume"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - RTCAudioSource *audioSource = audioTrack.source; - audioSource.volume = [volume doubleValue]; - } - result(nil); - } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* trackId = argsMap[@"trackId"]; - NSNumber* mute = argsMap[@"mute"]; - RTCMediaStreamTrack *track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack *audioTrack = (RTCAudioTrack *)track; - audioTrack.isEnabled = !mute.boolValue; - } - result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* enable = argsMap[@"enable"]; -#if 0 - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:enable.boolValue ? 
AVAudioSessionCategoryOptionDefaultToSpeaker : 0 - error:nil]; - [audioSession setActive:YES error:nil]; -#endif - result(nil); - } else if ([@"getLocalDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.localDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"getRemoteDescription" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"peerConnectionId"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - RTCSessionDescription* sdp = peerConnection.remoteDescription; - NSString *type = [RTCSessionDescription stringForType:sdp.type]; - result(@{@"sdp": sdp.sdp, @"type": type}); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else if ([@"setConfiguration" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSString* peerConnectionId = argsMap[@"ownerTag"]; - NSDictionary* configuration = argsMap[@"configuration"]; - RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; - if(peerConnection) { - [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] peerConnection:peerConnection]; - result(nil); - } else { - result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed",call.method] - message:[NSString stringWithFormat:@"Error: peerConnection not found!"] - details:nil]); - } - } else { - result(FlutterMethodNotImplemented); - } -} - -- (void)dealloc -{ - [_localTracks removeAllObjects]; - _localTracks = nil; - [_localStreams removeAllObjects]; - _localStreams = nil; - - for (NSString *peerConnectionId in _peerConnections) { - RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId]; - peerConnection.delegate = nil; - [peerConnection close]; - } - [_peerConnections removeAllObjects]; - _peerConnectionFactory = nil; -} - - --(void)mediaStreamGetTracks:(NSString*)streamId - result:(FlutterResult)result { - RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; - if(stream){ - NSMutableArray *audioTracks = [NSMutableArray array]; - NSMutableArray *videoTracks = [NSMutableArray array]; - - for (RTCMediaStreamTrack *track in stream.audioTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [audioTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - for (RTCMediaStreamTrack *track in stream.videoTracks) { - NSString *trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; - [videoTracks addObject:@{ - @"enabled": @(track.isEnabled), - @"id": trackId, - @"kind": track.kind, - @"label": trackId, - @"readyState": @"live", - @"remote": @(NO) - }]; - } - - result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ 
- result(nil); - } -} - -- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString *)peerConnectionId -{ - RTCMediaStream *stream = _localStreams[streamId]; - if (!stream) { - if (peerConnectionId.length > 0) { - RTCPeerConnection *peerConnection = [_peerConnections objectForKey:peerConnectionId]; - stream = peerConnection.remoteStreams[streamId]; - } else { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - stream = peerConnection.remoteStreams[streamId]; - if (stream) { - break; - } - } - } - } - return stream; -} - -- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId -{ - RTCMediaStreamTrack *track = _localTracks[trackId]; - if (!track) { - for (RTCPeerConnection *peerConnection in _peerConnections.allValues) { - track = peerConnection.remoteTracks[trackId]; - if (track) { - break; - } - } - } - - return track; -} - -- (RTCIceServer *)RTCIceServer:(id)json -{ - if (!json) { - NSLog(@"a valid iceServer value"); - return nil; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return nil; - } - - NSArray *urls; - if ([json[@"url"] isKindOfClass:[NSString class]]) { - // TODO: 'url' is non-standard - urls = @[json[@"url"]]; - } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { - urls = @[json[@"urls"]]; - } else { - urls = (NSArray*)json[@"urls"]; - } - - if (json[@"username"] != nil || json[@"credential"] != nil) { - return [[RTCIceServer alloc]initWithURLStrings:urls - username:json[@"username"] - credential:json[@"credential"]]; - } - - return [[RTCIceServer alloc] initWithURLStrings:urls]; -} - - -- (nonnull RTCConfiguration *)RTCConfiguration:(id)json -{ - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - - if (!json) { - return config; - } - - if (![json isKindOfClass:[NSDictionary class]]) { - NSLog(@"must be an object"); - return config; - } - - if (json[@"audioJitterBufferMaxPackets"] != nil && [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { - config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; - } - - if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { - NSString *bundlePolicy = json[@"bundlePolicy"]; - if ([bundlePolicy isEqualToString:@"balanced"]) { - config.bundlePolicy = RTCBundlePolicyBalanced; - } else if ([bundlePolicy isEqualToString:@"max-compat"]) { - config.bundlePolicy = RTCBundlePolicyMaxCompat; - } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { - config.bundlePolicy = RTCBundlePolicyMaxBundle; - } - } - - if (json[@"iceBackupCandidatePairPingInterval"] != nil && [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { - config.iceBackupCandidatePairPingInterval = [json[@"iceBackupCandidatePairPingInterval"] intValue]; - } - - if (json[@"iceConnectionReceivingTimeout"] != nil && [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { - config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; - } - - if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { - NSMutableArray *iceServers = [NSMutableArray new]; - for (id server in json[@"iceServers"]) { - RTCIceServer *convert = [self RTCIceServer:server]; - if (convert != nil) { - [iceServers addObject:convert]; - } - } - config.iceServers = iceServers; - } - - if (json[@"iceTransportPolicy"] != nil && [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { - NSString *iceTransportPolicy = 
json[@"iceTransportPolicy"]; - if ([iceTransportPolicy isEqualToString:@"all"]) { - config.iceTransportPolicy = RTCIceTransportPolicyAll; - } else if ([iceTransportPolicy isEqualToString:@"none"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNone; - } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { - config.iceTransportPolicy = RTCIceTransportPolicyNoHost; - } else if ([iceTransportPolicy isEqualToString:@"relay"]) { - config.iceTransportPolicy = RTCIceTransportPolicyRelay; - } - } - - if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { - NSString *rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; - if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; - } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { - config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; - } - } - - if (json[@"tcpCandidatePolicy"] != nil && [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { - NSString *tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; - if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; - } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { - config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; - } - } - - if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { - NSString *sdpSemantics = json[@"sdpSemantics"]; - if ([sdpSemantics isEqualToString:@"plan-b"]) { - config.sdpSemantics = RTCSdpSemanticsPlanB; - } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { - config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - } - } - - return config; -} - -- (RTCDataChannelConfiguration *)RTCDataChannelConfiguration:(id)json -{ - if (!json) { - return nil; - } - if ([json isKindOfClass:[NSDictionary class]]) { - RTCDataChannelConfiguration *init = [RTCDataChannelConfiguration new]; - - if (json[@"id"]) { - [init setChannelId:(int)[json[@"id"] integerValue]]; - } - if (json[@"ordered"]) { - init.isOrdered = [json[@"ordered"] boolValue]; - } - if (json[@"maxRetransmitTime"]) { - init.maxRetransmitTimeMs = [json[@"maxRetransmitTime"] integerValue]; - } - if (json[@"maxRetransmits"]) { - init.maxRetransmits = [json[@"maxRetransmits"] intValue]; - } - if (json[@"negotiated"]) { - init.isNegotiated = [json[@"negotiated"] boolValue]; - } - if (json[@"protocol"]) { - init.protocol = json[@"protocol"]; - } - return init; - } - return nil; -} - -- (CGRect)parseRect:(NSDictionary *)rect { - return CGRectMake([[rect valueForKey:@"left"] doubleValue], - [[rect valueForKey:@"top"] doubleValue], - [[rect valueForKey:@"width"] doubleValue], - [[rect valueForKey:@"height"] doubleValue]); -} - -@end diff --git a/macos/Classes/FlutterWebRTCPlugin.m b/macos/Classes/FlutterWebRTCPlugin.m new file mode 120000 index 0000000000..7d5cc6ca16 --- /dev/null +++ b/macos/Classes/FlutterWebRTCPlugin.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterWebRTCPlugin.m \ No newline at end of file diff --git a/macos/Classes/FlutterWebRTCPlugin.swift b/macos/Classes/FlutterWebRTCPlugin.swift deleted file mode 100644 index 9521120a03..0000000000 --- a/macos/Classes/FlutterWebRTCPlugin.swift +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import FlutterMacOS -import Foundation - -public class FLEFlutterWebRTCPlugin: NSObject, FlutterPlugin { - public static func register(with registrar: FlutterPluginRegistrar) { - FlutterWebRTCPlugin.register(with: registrar) - } -} From 61ef49c4b041d1b796a3deba419ef0ce4e38584a Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Tue, 13 Oct 2020 21:38:24 +0800 Subject: [PATCH 26/26] Update rtc_peerconnection_impl.dart --- lib/src/web/rtc_peerconnection_impl.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/src/web/rtc_peerconnection_impl.dart b/lib/src/web/rtc_peerconnection_impl.dart index ed3c12c38c..db57c3278e 100644 --- a/lib/src/web/rtc_peerconnection_impl.dart +++ b/lib/src/web/rtc_peerconnection_impl.dart @@ -86,7 +86,7 @@ class RTCPeerConnectionWeb extends RTCPeerConnection { js.JsObject.fromBrowserObject(_jsPc)['connectionstatechange'] = js.JsFunction.withThis((_, state) { _connectionState = peerConnectionStateForString(state); - onConnectionState.call(_connectionState); + onConnectionState?.call(_connectionState); }); js.JsObject.fromBrowserObject(_jsPc)['negotiationneeded'] =