8000 Merge pull request #102 from wbarnard/speakerphone · next-coder/flutter-webrtc@4e2548b · GitHub
[go: up one dir, main page]

Skip to content

Commit 4e2548b

Browse files
authored
Merge pull request flutter-webrtc#102 from wbarnard/speakerphone
Speakerphone
2 parents e466d0e + 2e7dcc1 commit 4e2548b

File tree

3 files changed

+33
-0
lines changed

3 files changed

+33
-0
lines changed

android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import android.content.Context;
55
import android.hardware.Camera;
66
import android.graphics.SurfaceTexture;
7+
import android.media.AudioManager;
78
import android.util.Log;
89
import android.util.LongSparseArray;
910

@@ -308,6 +309,11 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
308309
boolean mute = call.argument("mute");
309310
mediaStreamTrackSetMicrophoneMute(trackId, mute);
310311
result.success(null);
312+
} else if (call.method.equals("enableSpeakerphone")) {
313+
String trackId = call.argument("trackId");
314+
boolean enable = call.argument("enable");
315+
mediaStreamTrackEnableSpeakerphone(trackId, enable);
316+
result.success(null);
311317
} else if(call.method.equals("getDisplayMedia")) {
312318
Map<String, Object> constraints = call.argument("constraints");
313319
ConstraintsMap constraintsMap = new ConstraintsMap(constraints);
@@ -902,6 +908,16 @@ public void mediaStreamTrackSetMicrophoneMute(final String id, boolean mute) {
902908
}
903909
}
904910

911+
public void mediaStreamTrackEnableSpeakerphone(final String id, boolean enabled) {
912+
AudioManager audioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE);
913+
914+
try {
915+
audioManager.setSpeakerphoneOn(enabled);
916+
} catch (Exception e) {
917+
Log.e(TAG, "setSpeakerphoneOn(): error", e);
918+
}
919+
}
920+
905921
public void mediaStreamTrackRelease(final String streamId, final String _trackId) {
906922
MediaStream stream = localStreams.get(streamId);
907923
if (stream == null) {

ios/Classes/FlutterWebRTCPlugin.m

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -388,6 +388,15 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
388388
audioTrack.isEnabled = !mute.boolValue;
389389
}
390390
result(nil);
391+
} else if ([@"enableSpeakerphone" isEqualToString:call.method]) {
392+
NSDictionary* argsMap = call.arguments;
393+
NSNumber* enable = argsMap[@"enable"];
394+
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
395+
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
396+
withOptions:enable.boolValue ? AVAudioSessionCategoryOptionDefaultToSpeaker : 0
397+
error:nil];
398+
[audioSession setActive:YES error:nil];
399+
result(nil);
391400
}else if ([@"getLocalDescription" isEqualToString:call.method]) {
392401
NSDictionary* argsMap = call.arguments;
393402
NSString* peerConnectionId = argsMap[@"peerConnectionId"];

lib/media_stream_track.dart

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,14 @@ class MediaStreamTrack {
4646
);
4747
}
4848

49+
/// Routes audio output through the speakerphone when [enable] is true,
/// or back to the default output (e.g. earpiece) when false.
///
/// Returns a [Future] that completes once the platform side has applied
/// the change, so callers can `await` it. (Previously declared `void`
/// despite being `async`, which made completion unobservable; also drops
/// a leftover debug `print`.)
Future<void> enableSpeakerphone(bool enable) async {
  await _channel.invokeMethod(
    'enableSpeakerphone',
    <String, dynamic>{'trackId': _trackId, 'enable': enable},
  );
}
56+
4957
captureFrame(String filePath) =>
5058
_channel.invokeMethod(
5159
'captureFrame',

0 commit comments

Comments
 (0)
0