8000 Improve audio handling on iOS (#705) · flutter-robert/flutter-webrtc@7fb1316 · GitHub
[go: up one dir, main page]

Skip to content

Commit 7fb1316

Browse files
Improve audio handling on iOS (flutter-webrtc#705)
* Use `track` instead of `kind` when both `track` and `kind` are set (flutter-webrtc#4) * `getSettings` for Web (flutter-webrtc#3) * Fix: init would never be `RTCRtpTransceiverInitWeb` * Clean up * Fix: Flutter 2.5.0 `RTCVideoRendererWeb` bug * upgrade AudioSession if existing session doesn't meet defaults * improved session category, set defaults * fix lightning headphones Co-authored-by: Hiroshi Horie <me@hiroshi.app> Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com>
1 parent 0213d97 commit 7fb1316

File tree

9 files changed

+186
-103
lines changed

9 files changed

+186
-103
lines changed

common/darwin/Classes/AudioUtils.h

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
// Foundation is required for NSObject and BOOL so this header
// compiles standalone (it was previously missing, making the header
// depend on importer include order).
#import <Foundation/Foundation.h>

/// Class-method helpers for configuring the shared iOS audio session
/// for WebRTC playback and recording. All methods are no-ops on
/// non-iPhone targets.
@interface AudioUtils : NSObject

/// Ensures the app-wide audio session category is suitable for WebRTC:
/// PlayAndRecord/MultiRoute when `recording` is YES, and at least
/// Playback (upgrading from Ambient/SoloAmbient) when NO.
+ (void)ensureAudioSessionWithRecording:(BOOL)recording;

// needed for wired headphones to use headphone mic
+ (void)setPreferHeadphoneInput;

@end

common/darwin/Classes/AudioUtils.m

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#import "AudioUtils.h"
2+
#import <WebRTC/WebRTC.h>
3+
4+
#if TARGET_OS_IPHONE
5+
#import <AVFoundation/AVFoundation.h>
6+
#endif
7+
8+
@implementation AudioUtils
9+
10+
+ (void)ensureAudioSessionWithRecording:(BOOL)recording {
11+
#if TARGET_OS_IPHONE
12+
RTCAudioSession *session = [RTCAudioSession sharedInstance];
13+
// we also need to set default WebRTC audio configuration, since it may be activated after
14+
// this method is called
15+
RTCAudioSessionConfiguration *config = [RTCAudioSessionConfiguration webRTCConfiguration];
16+
// require audio session to be either PlayAndRecord or MultiRoute
17+
if (recording && session.category != AVAudioSessionCategoryPlayAndRecord &&
18+
session.category != AVAudioSessionCategoryMultiRoute) {
19+
config.category = AVAudioSessionCategoryPlayAndRecord;
20+
config.categoryOptions = AVAudioSessionCategoryOptionDefaultToSpeaker |
21+
AVAudioSessionCategoryOptionAllowBluetooth |
22+
AVAudioSessionCategoryOptionAllowBluetoothA2DP;
23+
[session setCategory:config.category
24+
withOptions:config.categoryOptions
25+
error:nil];
26+
[session setMode:config.mode error:nil];
27+
} else if (!recording && (session.category == AVAudioSessionCategoryAmbient
28+
|| session.category == AVAudioSessionCategorySoloAmbient)) {
29+
config.category = AVAudioSessionCategoryPlayback;
30+
config.categoryOptions = 0;
31+
32+
// upgrade from ambient if needed
33+
[session setCategory:config.category
34+
withOptions:config.categoryOptions
35+
error:nil];
36+
[session setMode:config.mode error:nil];
37+
}
38+
#endif
39+
}
40+
41+
+ (void)setPreferHeadphoneInput {
42+
#if TARGET_OS_IPHONE
43+
AVAudioSession *session = [AVAudioSession sharedInstance];
44+
AVAudioSessionPortDescription *inputPort = nil;
45+
for (AVAudioSessionPortDescription *port in session.availableInputs) {
46+
if ([port.portType isEqualToString:AVAudioSessionPortHeadphones]) {
47+
inputPort = port;
48+
break;
49+
}
50+
}
51+
if (inputPort != nil) {
52+
[session setPreferredInput:inputPort error:nil];
53+
}
54+
#endif
55+
}
56+
57+
@end

common/darwin/Classes/FlutterRTCPeerConnection.m

Lines changed: 36 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,11 @@
22
#import "FlutterWebRTCPlugin.h"
33
#import "FlutterRTCPeerConnection.h"
44
#import "FlutterRTCDataChannel.h"
5+
#import "AudioUtils.h"
56

67
#import <WebRTC/WebRTC.h>
78

9+
810
@implementation RTCPeerConnection (Flutter)
911

1012
@dynamic eventSink;
@@ -170,11 +172,11 @@ -(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate
170172
-(void) peerConnectionClose:(RTCPeerConnection *)peerConnection
171173
{
172174
[peerConnection close];
173-
175+
174176
// Clean up peerConnection's streams and tracks
175177
[peerConnection.remoteStreams removeAllObjects];
176178
[peerConnection.remoteTracks removeAllObjects];
177-
179+
178180
// Clean up peerConnection's dataChannels.
179181
NSMutableDictionary<NSNumber *, RTCDataChannel *> *dataChannels
180182
= peerConnection.dataChannels;
@@ -198,17 +200,17 @@ -(void) peerConnectionGetStats:(nonnull NSString *)trackID
198200
[peerConnection statsForTrack:track
199201
statsOutputLevel:RTCStatsOutputLevelStandard
200202
completionHandler:^(NSArray<RTCLegacyStatsReport *> *reports) {
201-
203+
202204
NSMutableArray *stats = [NSMutableArray array];
203-
205+
204206
for (RTCLegacyStatsReport *report in reports) {
205207
[stats addObject:@{@"id": report.reportId,
206208
@"type": report.type,
207209
@"timestamp": @(report.timestamp),
208210
@"values": report.values
209211
}];
210212
}
211-
213+
212214
result(@{@"stats": stats});
213215
}];
214216
}else{
@@ -281,7 +283,7 @@ - (void)parseJavaScriptConstraints:(NSDictionary *)src
281283
for (id srcKey in src) {
282284
id srcValue = src[srcKey];
283285
NSString *dstValue;
284-
286+
285287
if ([srcValue isKindOfClass:[NSNumber class]]) {
286288
dstValue = [srcValue boolValue] ? @"true" : @"false";
287289
} else {
@@ -304,16 +306,16 @@ - (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints {
304306
id mandatory = constraints[@"mandatory"];
305307
NSMutableDictionary<NSString *, NSString *> *mandatory_
306308
= [NSMutableDictionary new];
307-
309+
308310
if ([mandatory isKindOfClass:[NSDictionary class]]) {
309311
[self parseJavaScriptConstraints:(NSDictionary *)mandatory
310312
intoWebRTCConstraints:mandatory_];
311313
}
312-
314+
313315
id optional = constraints[@"optional"];
314316
NSMutableDictionary<NSString *, NSString *> *optional_
315317
= [NSMutableDictionary new];
316-
318+
317319
if ([optional isKindOfClass:[NSArray class]]) {
318320
for (id o in (NSArray *)optional) {
319321
if ([o isKindOfClass:[NSDictionary class]]) {
@@ -322,7 +324,7 @@ - (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints {
322324
}
323325
}
324326
}
325-
327+
326328
return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_
327329
optionalConstraints:optional_];
328330
}
@@ -341,11 +343,11 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingSta
341343

342344
-(void)peerConnection:(RTCPeerConnection *)peerConnection
343345
mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{
344-
346+
345347
peerConnection.remoteTracks[track.trackId] = track;
346348
NSString *streamId = stream.streamId;
347349
peerConnection.remoteStreams[streamId] = stream;
348-
350+
349351
FlutterEventSink eventSink = peerConnection.eventSink;
350352
if(eventSink){
351353
eventSink(@{
@@ -388,19 +390,25 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMedi
388390
NSMutableArray *audioTracks = [NSMutableArray array];
389391
NSMutableArray *videoTracks = [NSMutableArray array];
390392

393+
BOOL hasAudio = NO;
391394
for (RTCAudioTrack *track in stream.audioTracks) {
392395
peerConnection.remoteTracks[track.trackId] = track;
393396
[audioTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
397+
hasAudio = YES;
394398
}
395-
399+
396400
for (RTCVideoTrack *track in stream.videoTracks) {
397401
peerConnection.remoteTracks[track.trackId] = track;
398402
[videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
399403
}
400-
404+
401405
NSString *streamId = stream.streamId;
402406
peerConnection.remoteStreams[streamId] = stream;
403-
407+
408+
if (hasAudio) {
409+
[AudioUtils ensureAudioSessionWithRecording:NO];
410+
}
411+
404412
FlutterEventSink eventSink = peerConnection.eventSink;
405413
if(eventSink){
406414
eventSink(@{
@@ -419,15 +427,15 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCM
419427
NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId);
420428
}
421429
NSString *streamId = stream.streamId;
422-
430+
423431
for (RTCVideoTrack *track in stream.videoTracks) {
424432
[peerConnection.remoteTracks removeObjectForKey:track.trackId];
425433
}
426434
for (RTCAudioTrack *track in stream.audioTracks) {
427435
[peerConnection.remoteTracks removeObjectForKey:track.trackId];
428436
}
429437
[peerConnection.remoteStreams removeObjectForKey:streamId];
430-
438+
431439
FlutterEventSink eventSink = peerConnection.eventSink;
432440
if(eventSink){
433441
eventSink(@{
@@ -483,15 +491,15 @@ - (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RT
483491
dataChannel.peerConnectionId = peerConnection.flutterId;
484492
dataChannel.delegate = self;
485493
peerConnection.dataChannels[dataChannelId] = dataChannel;
486-
494+
487495
FlutterEventChannel *eventChannel = [FlutterEventChannel
488496
eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$d", peerConnection.flutterId, dataChannel.channelId]
489497
binaryMessenger:self.messenger];
490-
498+
491499
dataChannel.eventChannel = eventChannel;
492500
dataChannel.flutterChannelId = dataChannelId;
493501
[eventChannel setStreamHandler:dataChannel];
494-
502+
495503
FlutterEventSink eventSink = peerConnection.eventSink;
496504
if(eventSink){
497505
eventSink(@{
@@ -516,7 +524,7 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection
516524

517525
- (void)peerConnection:(RTCPeerConnection *)peerConnection
518526
didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver {
519-
527+
520528
}
521529

522530
/** Called when a receiver and its track are created. */
@@ -537,7 +545,7 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection
537545
@"receiver": [self receiverToMap:rtpReceiver],
538546
@"streams": streams,
539547
}];
540-
548+
541549
if(peerConnection.configuration.sdpSemantics == RTCSdpSemanticsUnifiedPlan) {
542550
for(RTCRtpTransceiver *transceiver in peerConnection.transceivers) {
543551
if(transceiver.receiver != nil && [transceiver.receiver.receiverId isEqualToString:rtpReceiver.receiverId]) {
@@ -550,15 +558,18 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection
550558
if (mediaStreams.count > 0) {
551559
peerConnection.remoteStreams[mediaStreams[0].streamId] = mediaStreams[0];
552560
}
553-
561+
562+
if ([rtpReceiver.track.kind isEqualToString:@"audio"]) {
563+
[AudioUtils ensureAudioSessionWithRecording:NO];
564+
}
554565
eventSink(event);
555566
}
556567
}
557568

558569
/** Called when the receiver and its track are removed. */
559570
- (void)peerConnection:(RTCPeerConnection *)peerConnection
560571
didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver {
561-
572+
562573
}
563574

564575
/** Called when the selected ICE candidate pair is changed. */
@@ -567,7 +578,7 @@ - (void)peerConnection:(RTCPeerConnection *)peerConnection
567578
remoteCandidate:(RTCIceCandidate *)remote
568579
lastReceivedMs:(int)lastDataReceivedMs
569580
changeReason:(NSString *)reason {
570-
581+
571582
FlutterEventSink eventSink = peerConnection.eventSink;
572583
if(eventSink){
573584
eventSink(@{

0 commit comments

Comments (0)