1, Add the getSources method. · lineCode/flutter-webrtc@722b777 · GitHub
[go: up one dir, main page]

Skip to content

Commit 722b777

Browse files
committed
1, Add the getSources method.
2. Fix iOS VideoCapture.stop issues. 3, Fix the size issue of Flutter VideoView.
1 parent 2214541 commit 722b777

File tree

13 files changed

+156
-115
lines changed

13 files changed

+156
-115
lines changed

android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,10 +21,12 @@ public class FlutterRTCVideoRenderer implements EventChannel.StreamHandler {
2121

2222
private static final String TAG = FlutterWebRTCPlugin.TAG;
2323
private final SurfaceTexture texture;
24+
private final Context context;
2425
private int id = -1;
2526

2627
public void Dispose(){
2728
//destroy
29+
this.surfaceTextureRenderer.release();
2830
eventChannel.setStreamHandler(null);
2931
eventSink = null;
3032
}
@@ -69,7 +71,7 @@ public void onFrameResolutionChanged(
6971
}
7072
};
7173

72-
private final SurfaceTextureRenderer surfaceTextureRenderer;
74+
private SurfaceTextureRenderer surfaceTextureRenderer;
7375

7476
/**
7577
* The {@code VideoRenderer}, if any, which renders {@link #videoTrack} on
@@ -88,6 +90,7 @@ public void onFrameResolutionChanged(
8890
public FlutterRTCVideoRenderer(SurfaceTexture texture, Context context) {
8991
this.surfaceTextureRenderer = new SurfaceTextureRenderer(context, texture);
9092
this.texture = texture;
93+
this.context = context;
9194
this.eventSink = null;
9295
}
9396

@@ -167,6 +170,9 @@ private void setVideoTrack(VideoTrack videoTrack) {
167170

168171
if (videoTrack != null) {
169172
tryAddRendererToVideoTrack();
173+
}else{
174+
this.surfaceTextureRenderer.release();
175+
this.surfaceTextureRenderer = new SurfaceTextureRenderer(context, texture);
170176
}
171177
}
172178
}

android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -116,6 +116,8 @@ public void onMethodCall(MethodCall call, Result result) {
116116
Map<String, Object> constraints = call.argument("constraints");
117117
ConstraintsMap constraintsMap = new ConstraintsMap(constraints);
118118
getUserMedia(constraintsMap, result);
119+
}else if (call.method.equals("getSources")) {
120+
getSources(result);
119121
}else if (call.method.equals("createOffer")) {
120122
String peerConnectionId = call.argument("peerConnectionId");
121123
Map<String, Object> constraints = call.argument("constraints");
@@ -175,7 +177,7 @@ public void onMethodCall(MethodCall call, Result result) {
175177
String trackId = call.argument("trackId");
176178
localTracks.remove(trackId);
177179
result.success(null);
178-
} else if (call.method.equals("peerConnectionDispose")) {
180+
} else if (call.method.equals("peerConnectionClose")) {
179181
String peerConnectionId = call.argument("peerConnectionId");
180182
peerConnectionClose(peerConnectionId);
181183
result.success(null);
@@ -214,10 +216,6 @@ public void onMethodCall(MethodCall call, Result result) {
214216
}
215217

216218
MediaStream stream = getStreamForId(streamId);
217-
if(stream == null ){
218-
result.error("MediaStreamNotFound", "media stream [" + streamId + "] not found !", null);
219-
return;
220-
}
221219
render.setStream(stream);
222220
result.success(null);
223221
} else {
@@ -628,7 +626,7 @@ public void getUserMedia(ConstraintsMap constraints, Result result) {
628626
getUserMediaImpl.getUserMedia(constraints, result, mediaStream);
629627
}
630628

631-
public void mediaStreamTrackGetSources(Result result) {
629+
public void getSources(Result result) {
632630
ConstraintsArray array = new ConstraintsArray();
633631
String[] names = new String[Camera.getNumberOfCameras()];
634632

@@ -644,7 +642,6 @@ public void mediaStreamTrackGetSources(Result result) {
644642
audio.putString("deviceId", "audio-1");
645643
audio.putString("facing", "");
646644
audio.putString("kind", "audioinput");
647-
648645
array.pushMap(audio);
649646
result.success(array);
650647
}
@@ -720,7 +717,6 @@ public ConstraintsMap getCameraInfo(int index) {
720717
params.putString("deviceId", "" + index);
721718
params.putString("facing", facing);
722719
params.putString("kind", "videoinput");
723-
724720
return params;
725721
}
726722

example/android/app/.classpath

Lines changed: 0 additions & 6 deletions
This file was deleted.

example/lib/main.dart

Lines changed: 45 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ import 'package:webrtc/get_user_media.dart';
66
import 'package:webrtc/rtc_session_description.dart';
77
import 'package:webrtc/rtc_video_view.dart';
88
import 'package:webrtc/rtc_ice_candidate.dart';
9-
import 'dart:async';
109
import 'dart:core';
1110

1211
void main() => runApp(new MyApp());
@@ -21,11 +20,17 @@ class _MyAppState extends State<MyApp> {
2120
RTCPeerConnection _peerConnection;
2221
final _localRenderer = new RTCVideoRenderer();
2322
final _remoteRenderer = new RTCVideoRenderer();
23+
bool incalling = false;
2424

2525
@override
2626
initState() {
2727
super.initState();
28-
initPlatformState();
28+
initRenderers();
29+
}
30+
31+
initRenderers() async {
32+
await _localRenderer.initialize();
33+
await _remoteRenderer.initialize();
2934
}
3035

3136
_onSignalingState(RTCSignalingState state) {
@@ -59,7 +64,7 @@ class _MyAppState extends State<MyApp> {
5964
}
6065

6166
// Platform messages are asynchronous, so we initialize in an async me 10000 thod.
62-
initPlatformState() async {
67+
_makeCall() async {
6368
final Map<String, dynamic> mediaConstraints = {
6469
"audio": true,
6570
"video": {
@@ -80,30 +85,29 @@ class _MyAppState extends State<MyApp> {
8085
]
8186
};
8287

83-
final Map<String, dynamic> OFFER_SDP_CONSTRAINTS = {
88+
final Map<String, dynamic> offer_sdp_constraints = {
8489
"mandatory": {
8590
"OfferToReceiveAudio": true,
8691
"OfferToReceiveVideo": true,
8792
},
8893
"optional": [],
8994
};
9095

91-
final Map<String, dynamic> LOOPBACK_CONSTRAINTS = {
96+
final Map<String, dynamic> loopback_constraints = {
9297
"mandatory": {},
9398
"optional": [
9499
{"DtlsSrtpKeyAgreement": false},
95100
],
96101
};
97102

98-
// Platform messages may fail, so we use a try/catch PlatformException.
103+
if (_peerConnection != null) return;
104+
99105
try {
100106
_localStream = await getUserMedia(mediaConstraints);
101-
await _localRenderer.initialize();
102-
await _remoteRenderer.initialize();
103107
_localRenderer.srcObject = _localStream;
104108

105109
_peerConnection =
106-
await createPeerConnection(configuration, LOOPBACK_CONSTRAINTS);
110+
await createPeerConnection(configuration, loopback_constraints);
107111

108112
_peerConnection.onSignalingState = _onSignalingState;
109113
_peerConnection.onIceGatheringState = _onIceGatheringState;
@@ -115,18 +119,35 @@ class _MyAppState extends State<MyApp> {
115119

116120
_peerConnection.addStream(_localStream);
117121
RTCSessionDescription description =
118-
await _peerConnection.createOffer(OFFER_SDP_CONSTRAINTS);
122+
await _peerConnection.createOffer(offer_sdp_constraints);
119123
print(description.sdp);
120124
_peerConnection.setLocalDescription(description);
121125
//change for loopback.
122126
description.type = 'answer';
123127
_peerConnection.setRemoteDescription(description);
124128
} catch (e) {
125-
//'Failed to get platform version.';
129+
print(e.toString());
126130
}
127131
if (!mounted) return;
128132

129-
setState(() {});
133+
setState(() {
134+
incalling = true;
135+
});
136+
}
137+
138+
_hangUp() async {
139+
try {
140+
await _peerConnection.close();
141+
_peerConnection = null;
142+
await _localStream.dispose();
143+
_localRenderer.srcObject = null;
144+
_remoteRenderer.srcObject = null;
145+
} catch (e) {
146+
print(e.toString());
147+
}
148+
setState(() {
149+
incalling = false;
150+
});
130151
}
131152

132153
@override
@@ -139,14 +160,14 @@ class _MyAppState extends State<MyApp> {
139160
body: new OrientationBuilder(
140161
builder: (context, orientation) {
141162
return new Center(
142-
143163
child: new Container(
144-
decoration: new BoxDecoration(color: Colors.black),
145-
164+
decoration: new BoxDecoration(color: Colors.white),
146165
child: new Stack(
147166
children: <Widget>[
148167
new Align(
149-
alignment: orientation == Orientation.portrait ? const FractionalOffset(0.5, 0.1):const FractionalOffset(0.0, 0.5),
168+
alignment: orientation == Orientation.portrait
169+
? const FractionalOffset(0.5, 0.1)
170+
: const FractionalOffset(0.0, 0.5),
150171
child: new Container(
151172
width: 320.0,
152173
height: 240.0,
@@ -155,7 +176,9 @@ class _MyAppState extends State<MyApp> {
155176
),
156177
),
157178
new Align(
158-
alignment: orientation == Orientation.portrait ? const FractionalOffset(0.5, 0.9):const FractionalOffset(1.0, 0.5),
179+
alignment: orientation == Orientation.portrait
180+
? const FractionalOffset(0.5, 0.9)
181+
: const FractionalOffset(1.0, 0.5),
159182
child: new Container(
160183
width: 320.0,
161184
height: 240.0,
@@ -166,10 +189,14 @@ class _MyAppState extends State<MyApp> {
166189
],
167190
),
168191
),
169-
170192
);
171193
},
172194
),
195+
floatingActionButton: new FloatingActionButton(
196+
onPressed: incalling ? _hangUp : _makeCall,
197+
tooltip: incalling ? 'Hangup' : 'Call',
198+
child: new Icon(incalling ? Icons.call_end : Icons.phone),
199+
),
173200
),
174201
);
175202
}

ios/Classes/FlutterRTCMediaStream.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@
55

66
-(void)getUserMedia:(NSDictionary *)constraints
77
result:(FlutterResult)result;
8+
9+
-(void)getSources:(FlutterResult)result;
810
@end
911

1012

ios/Classes/FlutterRTCMediaStream.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -378,7 +378,7 @@ - (void)requestAccessForMediaType:(NSString *)mediaType
378378
}];
379379
}
380380

381-
-(void)mediaStreamTrackGetSources{
381+
-(void)getSources:(FlutterResult)result{
382382
NSMutableArray *sources = [NSMutableArray array];
383383
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
384384
for (AVCaptureDevice *device in videoDevices) {
@@ -398,7 +398,7 @@ -(void)mediaStreamTrackGetSources{
398398
@"kind": @"audioinput",
399399
}];
400400
}
401-
//TODO: resolve(sources);
401+
result(@{@"sources": sources});
402402
}
403403

404404
-(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track

ios/Classes/FlutterWebRTCPlugin.m

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
#import <WebRTC/RTCIceCandidate.h>
1111
#import <WebRTC/RTCSessionDescription.h>
1212
#import <WebRTC/RTCIceServer.h>
13+
#import <WebRTC/RTCAVFoundationVideoSource.h>
1314

1415
#import "FlutterRTCPeerConnection.h"
1516
#import "FlutterRTCMediaStream.h"
@@ -96,7 +97,9 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
9697
NSDictionary* argsMap = call.arguments;
9798
NSDictionary* constraints = argsMap[@"constraints"];
9899
[self getUserMedia:constraints result:result];
99-
} else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) {
100+
} else if ([@"getSources" isEqualToString:call.method]) {
101+
[self getSources:result];
102+
}else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) {
100103
NSDictionary* argsMap = call.arguments;
101104
NSString* streamId = argsMap[@"streamId"];
102105
[self mediaStreamGetTracks:streamId result:result];
@@ -256,6 +259,12 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
256259
if (stream) {
257260
for (RTCVideoTrack *track in stream.videoTracks) {
258261
[self.localTracks removeObjectForKey:track.trackId];
262+
RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
263+
RTCVideoSource *source = videoTrack.source;
264+
if ([source isKindOfClass:[RTCAVFoundationVideoSource class]]) {
265+
RTCAVFoundationVideoSource *avSource = (RTCAVFoundationVideoSource *)source;
266+
[avSource Stop];
267+
}
259268
}
260269
for (RTCAudioTrack *track in stream.audioTracks) {
261270
[self.localTracks removeObjectForKey:track.trackId];
@@ -268,7 +277,7 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
268277
NSString* trackId = argsMap[@"trackId"];
269278
[self.localTracks removeObjectForKey:trackId];
270279
result(nil);
271-
}else if([@"peerConnectionDispose" isEqualToString:call.method]){
280+
}else if([@"peerConnectionClose" isEqualToString:call.method]){
272281
NSDictionary* argsMap = call.arguments;
273282
NSString* peerConnectionId = argsMap[@"peerConnectionId"];
274283

ios/WebRTC.framework/Headers/RTCAVFoundationVideoSource.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,10 @@ RTC_EXPORT
3131

3232
- (instancetype)init NS_UNAVAILABLE;
3333

34+
- (BOOL)IsRunning;
35+
36+
- (void)Stop;
37+
3438
/**
3539
* Calling this function will cause frames to be scaled down to the
3640
* requested resolution. Also, frames will be cropped to match the

ios/WebRTC.framework/Info.plist

0 Bytes
Binary file not shown.

ios/WebRTC.framework/WebRTC

308 Bytes
Binary file not shown.

0 commit comments

Comments (0)