Enable audio capture when acquiring track (#750) · flutter-robert/flutter-webrtc@64c513c · GitHub

Commit 64c513c

Enable audio capture when acquiring track (flutter-webrtc#750)
1 parent 0225363 commit 64c513c

File tree

2 files changed (+24, -28 lines)

common/darwin/Classes/FlutterRTCMediaStream.m

Lines changed: 24 additions & 20 deletions
@@ -5,6 +5,7 @@
 #import "FlutterRTCFrameCapturer.h"
 #import "FlutterRTCMediaStream.h"
 #import "FlutterRTCPeerConnection.h"
+#import "AudioUtils.h"
 
 #if TARGET_OS_IPHONE
 #import "FlutterRPScreenRecorder.h"
@@ -71,9 +72,12 @@ - (void)getUserAudio:(NSDictionary *)constraints
   NSString *trackId = [[NSUUID UUID] UUIDString];
   RTCAudioTrack *audioTrack
     = [self.peerConnectionFactory audioTrackWithTrackId:trackId];
-
+
   [mediaStream addAudioTrack:audioTrack];
-
+
+  // allow audio capture
+  [AudioUtils ensureAudioSessionWithRecording:YES];
+
   successCallback(mediaStream);
 }
 
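The one functional change in this file is the new call into AudioUtils above: the audio session is switched into a recording-capable state at the moment the local audio track is created, instead of later, when a track is attached to a peer connection (see the deletions in FlutterWebRTCPlugin.m below). AudioUtils is only imported here and its implementation is not part of this commit; the following is a hypothetical sketch, assuming a typical iOS AVAudioSession helper, of what ensureAudioSessionWithRecording: might look like. Only the class name and selector come from the diff; everything else is assumed.

  // Hypothetical sketch only -- the real AudioUtils.m is not shown in this diff.
  #import <AVFoundation/AVFoundation.h>
  #import "AudioUtils.h"

  @implementation AudioUtils

  + (void)ensureAudioSessionWithRecording:(BOOL)recording {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;
    if (recording) {
      // PlayAndRecord lets microphone capture run while remote audio keeps playing.
      [session setCategory:AVAudioSessionCategoryPlayAndRecord
               withOptions:AVAudioSessionCategoryOptionAllowBluetooth |
                           AVAudioSessionCategoryOptionDefaultToSpeaker
                     error:&error];
    } else {
      [session setCategory:AVAudioSessionCategoryPlayback error:&error];
    }
    [session setActive:YES error:&error];
    if (error != nil) {
      NSLog(@"ensureAudioSessionWithRecording: failed: %@", error.localizedDescription);
    }
  }

  @end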

@@ -88,20 +92,20 @@ -(void)getUserMedia:(NSDictionary *)constraints
   NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
   RTCMediaStream *mediaStream
     = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
-
+
   [self
     getUserMedia:constraints
     successCallback:^ (RTCMediaStream *mediaStream) {
      NSString *mediaStreamId = mediaStream.streamId;
-
+
      NSMutableArray *audioTracks = [NSMutableArray array];
      NSMutableArray *videoTracks = [NSMutableArray array];
-
+
      for (RTCAudioTrack *track in mediaStream.audioTracks) {
        [self.localTracks setObject:track forKey:track.trackId];
        [audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
      }
-
+
      for (RTCVideoTrack *track in mediaStream.videoTracks) {
        [self.localTracks setObject:track forKey:track.trackId];
        [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
@@ -161,7 +165,7 @@ - (void)getUserMedia:(NSDictionary *)constraints
         return;
       }
     }
-
+
     // If mediaStream contains no videoTracks and the constraints request such a
     // track, then run an iteration of the getUserMedia() algorithm to obtain
     // local video content.
@@ -185,7 +189,7 @@ - (void)getUserMedia:(NSDictionary *)constraints
 #endif
       }
     }
-
+
     // There are audioTracks and/or videoTracks in mediaStream as requested by
     // constraints so the getUserMedia() is to conclude with success.
     successCallback(mediaStream);
@@ -258,17 +262,17 @@ - (void)getUserVideo:(NSDictionary *)constraints
       videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
     }
   }
-
+
   //TODO(rostopira): refactor to separate function and add support for max
 
   self._targetWidth = 1280;
   self._targetHeight = 720;
   self._targetFps = 30;
-
+
   if (!videoDevice && [constraints[@"video"] boolValue] == YES) {
     videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
   }
-
+
   id mandatory = [videoConstraints isKindOfClass:[NSDictionary class]]? videoConstraints[@"mandatory"] : nil ;
 
   // constraints.video.mandatory
@@ -296,7 +300,7 @@ - (void)getUserVideo:(NSDictionary *)constraints
       }
     }
   }
-
+
   if (videoDevice) {
     RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
     if (self.videoCapturer) {
@@ -310,11 +314,11 @@ - (void)getUserVideo:(NSDictionary *)constraints
        NSLog(@"Start capture error: %@", [error localizedDescription]);
      }
    }];
-
+
    NSString *trackUUID = [[NSUUID UUID] UUIDString];
    RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
    [mediaStream addVideoTrack:videoTrack];
-
+
    successCallback(mediaStream);
  } else {
    // According to step 6.2.3 of the getUserMedia() algorithm, if there is no
@@ -390,7 +394,7 @@ - (void)requestAccessForMediaType:(NSString *)mediaType
                 errorCallback:errorCallback
                   mediaStream:mediaStream];
   };
-
+
   if (mediaType == AVMediaTypeAudio) {
     [self getUserAudio:constraints
        successCallback:scb
@@ -441,27 +445,27 @@ -(void)getDisplayMedia:(NSDictionary *)constraints
                  result:(FlutterResult)result {
   NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
   RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
-
+
   RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
   FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
 
   [screenCapturer startCapture];
-
+
   //TODO:
   self.videoCapturer = screenCapturer;
-
+
   NSString *trackUUID = [[NSUUID UUID] UUIDString];
   RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
   [mediaStream addVideoTrack:videoTrack];
-
+
   NSMutableArray *audioTracks = [NSMutableArray array];
   NSMutableArray *videoTracks = [NSMutableArray array];
 
   for (RTCVideoTrack *track in mediaStream.videoTracks) {
     [self.localTracks setObject:track forKey:track.trackId];
     [videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
   }
-
+
   self.localStreams[mediaStreamId] = mediaStream;
   result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks });
 }

common/darwin/Classes/FlutterWebRTCPlugin.m

Lines changed: 0 additions & 8 deletions
@@ -659,10 +659,6 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
       return;
     }
 
-    if ([track.kind isEqualToString:@"audio"]) {
-      [AudioUtils ensureAudioSessionWithRecording:YES];
-    }
-
     result([self rtpSenderToMap:sender]);
   } else if ([@"removeTrack" isEqualToString:call.method]){
     NSDictionary* argsMap = call.arguments;
@@ -734,10 +730,6 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
       return;
     }
 
-    if (hasAudio) {
-      [AudioUtils ensureAudioSessionWithRecording:YES];
-    }
-
     result([self transceiverToMap:transceiver]);
   } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]){
     NSDictionary* argsMap = call.arguments;
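With these two deletions the plugin no longer touches the audio session from the addTrack and addTransceiver handlers; enabling recording now happens once, on the capture path. A simplified sketch of the assumed call flow after this commit follows; the method names are taken from the hunks above, while the dispatch from the Flutter method channel into getUserMedia is an assumption.

  // Assumed flow after this commit, for orientation only:
  //
  //   handleMethodCall: ("getUserMedia")
  //     -> getUserMedia:                                  (FlutterRTCMediaStream.m)
  //       -> requestAccessForMediaType:AVMediaTypeAudio
  //         -> getUserAudio:
  //           -> [AudioUtils ensureAudioSessionWithRecording:YES]   // added in this commit
  //
  //   addTrack / addTransceiver no longer call into AudioUtils at all.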

0 commit comments