Merge branch 'master' into unified-plan · Condelab/flutter-webrtc@33a500e

Commit 33a500e
Merge branch 'master' into unified-plan
2 parents: e4eaa33 + 4e2548b

3 files changed: 66 additions, 1 deletion

android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java

31 additions & 1 deletion

@@ -4,6 +4,7 @@
 import android.content.Context;
 import android.hardware.Camera;
 import android.graphics.SurfaceTexture;
+import android.media.AudioManager;
 import android.util.Log;
 import android.util.LongSparseArray;

@@ -69,6 +70,7 @@ public class FlutterWebRTCPlugin implements MethodCallHandler {
   private GetUserMediaImpl getUserMediaImpl;
   final PeerConnectionFactory mFactory;

+  private AudioDeviceModule audioDeviceModule;

   public Activity getActivity() {
     return registrar.activity();
@@ -108,7 +110,7 @@ private FlutterWebRTCPlugin(Registrar registrar, MethodChannel channel) {

     getUserMediaImpl = new GetUserMediaImpl(this, registrar.context());

-    AudioDeviceModule audioDeviceModule = JavaAudioDeviceModule.builder(registrar.context())
+    audioDeviceModule = JavaAudioDeviceModule.builder(registrar.context())
         .setUseHardwareAcousticEchoCanceler(true)
         .setUseHardwareNoiseSuppressor(true)
         .setSamplesReadyCallback(getUserMediaImpl.inputSamplesInterceptor)
@@ -302,6 +304,16 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
       double volume = call.argument("volume");
       mediaStreamTrackSetVolume(trackId, volume);
       result.success(null);
+    } else if (call.method.equals("setMicrophoneMute")) {
+      String trackId = call.argument("trackId");
+      boolean mute = call.argument("mute");
+      mediaStreamTrackSetMicrophoneMute(trackId, mute);
+      result.success(null);
+    } else if (call.method.equals("enableSpeakerphone")) {
+      String trackId = call.argument("trackId");
+      boolean enable = call.argument("enable");
+      mediaStreamTrackEnableSpeakerphone(trackId, enable);
+      result.success(null);
     } else if(call.method.equals("getDisplayMedia")) {
       Map<String, Object> constraints = call.argument("constraints");
       ConstraintsMap constraintsMap = new ConstraintsMap(constraints);
@@ -888,6 +900,24 @@ public void mediaStreamTrackSetVolume(final String id, final double volume) {
     }
   }

+  public void mediaStreamTrackSetMicrophoneMute(final String id, boolean mute) {
+    try {
+      audioDeviceModule.setMicrophoneMute(mute);
+    } catch (Exception e) {
+      Log.e(TAG, "setMicrophoneMute(): error", e);
+    }
+  }
+
+  public void mediaStreamTrackEnableSpeakerphone(final String id, boolean enabled) {
+    AudioManager audioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE);
+
+    try {
+      audioManager.setSpeakerphoneOn(enabled);
+    } catch (Exception e) {
+      Log.e(TAG, "setSpeakerphoneOn(): error", e);
+    }
+  }
+
   public void mediaStreamTrackRelease(final String streamId, final String _trackId) {
     MediaStream stream = localStreams.get(streamId);
     if (stream == null) {
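
Both new Android handlers accept a trackId argument but never use it: setMicrophoneMute mutes capture globally through the retained JavaAudioDeviceModule, and enableSpeakerphone toggles device-wide output routing with AudioManager.setSpeakerphoneOn. The Dart wrappers added below in lib/media_stream_track.dart are the intended entry point; as a minimal sketch, the raw platform-channel call these handlers expect looks like this (the channel name 'FlutterWebRTC.Method' is an assumption, not shown in this diff):

import 'package:flutter/services.dart';

// Hypothetical direct invocation; the channel name is assumed, and
// trackId is accepted but ignored by the Java handler.
const MethodChannel channel = MethodChannel('FlutterWebRTC.Method');

Future<void> setMicrophoneMute(String trackId, bool mute) async {
  // Argument keys match what the Java side reads via call.argument(...).
  await channel.invokeMethod(
    'setMicrophoneMute',
    <String, dynamic>{'trackId': trackId, 'mute': mute},
  );
}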

ios/Classes/FlutterWebRTCPlugin.m

19 additions & 0 deletions

@@ -378,6 +378,25 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
             audioSource.volume = [volume doubleValue];
         }
         result(nil);
+    } else if ([@"setMicrophoneMute" isEqualToString:call.method]) {
+        NSDictionary* argsMap = call.arguments;
+        NSString* trackId = argsMap[@"trackId"];
+        NSNumber* mute = argsMap[@"mute"];
+        RTCMediaStreamTrack *track = self.localTracks[trackId];
+        if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) {
+            RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
+            audioTrack.isEnabled = !mute.boolValue;
+        }
+        result(nil);
+    } else if ([@"enableSpeakerphone" isEqualToString:call.method]) {
+        NSDictionary* argsMap = call.arguments;
+        NSNumber* enable = argsMap[@"enable"];
+        AVAudioSession *audioSession = [AVAudioSession sharedInstance];
+        [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
+                      withOptions:enable.boolValue ? AVAudioSessionCategoryOptionDefaultToSpeaker : 0
+                            error:nil];
+        [audioSession setActive:YES error:nil];
+        result(nil);
     }else if ([@"getLocalDescription" isEqualToString:call.method]) {
         NSDictionary* argsMap = call.arguments;
         NSString* peerConnectionId = argsMap[@"peerConnectionId"];
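
Note the iOS semantics differ from Android: setMicrophoneMute disables only the named local RTCAudioTrack (other local audio tracks keep capturing), while enableSpeakerphone reconfigures the shared AVAudioSession with AVAudioSessionCategoryOptionDefaultToSpeaker and therefore affects all audio in the app. A rough Dart-level equivalence for the mute path, assuming the plugin's existing `enabled` setter on MediaStreamTrack (not part of this commit):

import 'package:flutter_webrtc/webrtc.dart'; // package entry point assumed

// On iOS, per this diff, setMicrophoneMute(mute) reduces to
// audioTrack.isEnabled = !mute on the native track, roughly:
void muteOnIOS(MediaStreamTrack audioTrack, bool mute) {
  audioTrack.enabled = !mute; // assumes the existing `enabled` setter
}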

lib/media_stream_track.dart

16 additions & 0 deletions

@@ -42,6 +42,22 @@ class MediaStreamTrack {
     );
   }

+  void setMicrophoneMute(bool mute) async {
+    print('MediaStreamTrack:setMicrophoneMute $mute');
+    await _channel.invokeMethod(
+      'setMicrophoneMute',
+      <String, dynamic>{'trackId': _trackId, 'mute': mute},
+    );
+  }
+
+  void enableSpeakerphone(bool enable) async {
+    print('MediaStreamTrack:enableSpeakerphone $enable');
+    await _channel.invokeMethod(
+      'enableSpeakerphone',
+      <String, dynamic>{'trackId': _trackId, 'enable': enable},
+    );
+  }
+
   captureFrame(String filePath) => _channel.invokeMethod(
         'captureFrame',
         <String, dynamic>{'trackId': _trackId, 'path': filePath},
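
These Dart wrappers are the public surface of the change. They are declared `void` but marked `async`, so callers cannot await completion; returning `Future<void>` would allow that. A usage sketch, assuming the plugin's existing `navigator.getUserMedia` helper and `MediaStream.getAudioTracks()` (neither appears in this commit):

import 'package:flutter_webrtc/webrtc.dart'; // package entry point assumed

Future<void> demo() async {
  // Capture a local audio-only stream; constraint shape assumed from
  // the plugin's existing getUserMedia API.
  final stream = await navigator.getUserMedia({'audio': true, 'video': false});
  final MediaStreamTrack audioTrack = stream.getAudioTracks()[0];

  audioTrack.setMicrophoneMute(true);  // stop sending microphone audio
  audioTrack.enableSpeakerphone(true); // route playback to the loudspeaker
}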
