@@ -15,6 +15,7 @@
 import com.cloudwebrtc.webrtc.utils.ConstraintsMap;
 import com.cloudwebrtc.webrtc.utils.EglUtils;
 import com.cloudwebrtc.webrtc.utils.ObjectType;
+import com.cloudwebrtc.webrtc.utils.RTCAudioManager;
 
 import java.io.UnsupportedEncodingException;
 import java.io.File;
@@ -72,6 +73,8 @@ public class FlutterWebRTCPlugin implements MethodCallHandler {
 
   private AudioDeviceModule audioDeviceModule;
 
+  private RTCAudioManager rtcAudioManager;
+
   public Activity getActivity() {
     return registrar.activity();
   }
@@ -124,6 +127,35 @@ private FlutterWebRTCPlugin(Registrar registrar, MethodChannel channel) {
         .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglContext))
         .setAudioDeviceModule(audioDeviceModule)
         .createPeerConnectionFactory();
+
+    rtcAudioManager = RTCAudioManager.create(registrar.context());
+    // Store existing audio settings and change audio mode to
+    // MODE_IN_COMMUNICATION for best possible VoIP performance.
+    Log.d(TAG, "Starting the audio manager...");
+    rtcAudioManager.start(new RTCAudioManager.AudioManagerEvents() {
+      // This method will be called each time the number of available audio
+      // devices has changed.
+      @Override
+      public void onAudioDeviceChanged(
+          RTCAudioManager.AudioDevice audioDevice, Set<RTCAudioManager.AudioDevice> availableAudioDevices) {
+        onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
+      }
+    });
+    /*
+    if (rtcAudioManager != null) {
+      rtcAudioManager.stop();
+      rtcAudioManager = null;
+    }
+    */
+  }
+
+  // This method is called when the audio manager reports an audio device change,
+  // e.g. from wired headset to speakerphone.
+  private void onAudioManagerDevicesChanged(
+      final RTCAudioManager.AudioDevice device, final Set<RTCAudioManager.AudioDevice> availableDevices) {
+    Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
+        + "selected: " + device);
+    // TODO(henrika): add callback handler.
   }
 
   @Override
@@ -942,13 +974,7 @@ public void mediaStreamTrackSetMicrophoneMute(final String id, boolean mute) {
   }
 
   public void mediaStreamTrackEnableSpeakerphone(final String id, boolean enabled) {
-    AudioManager audioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE);
-
-    try {
-      audioManager.setSpeakerphoneOn(enabled);
-    } catch (Exception e) {
-      Log.e(TAG, "setSpeakerphoneOn(): error", e);
-    }
+    rtcAudioManager.selectAudioDevice(enabled ? RTCAudioManager.AudioDevice.SPEAKER_PHONE : RTCAudioManager.AudioDevice.EARPIECE);
   }
 
   public void mediaStreamAddTrack(final String streaemId, final String trackId, Result result) {
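
Note (not part of the diff): the hunks above rely on a handful of RTCAudioManager calls, namely create(Context), start(AudioManagerEvents), stop(), selectAudioDevice(AudioDevice), and the AudioDevice enum. The sketch below is a heavily simplified, compilable stand-in reconstructed from those call sites alone; the real com.cloudwebrtc.webrtc.utils.RTCAudioManager (typically adapted from WebRTC's AppRTCAudioManager example) also tracks wired headsets, Bluetooth, and the proximity sensor, and fires AudioManagerEvents whenever routing actually changes.

// Sketch only: a minimal stand-in for RTCAudioManager, assuming nothing beyond
// android.media.AudioManager. It keeps just enough surface to show what the
// plugin calls; it is not the real implementation.
package com.cloudwebrtc.webrtc.utils;

import android.content.Context;
import android.media.AudioManager;

import java.util.Collections;
import java.util.Set;

public class RTCAudioManager {
  // Routing targets; the plugin uses SPEAKER_PHONE and EARPIECE.
  public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }

  // Callback the plugin registers in start() to observe routing changes.
  public interface AudioManagerEvents {
    void onAudioDeviceChanged(AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
  }

  private final AudioManager audioManager;
  private AudioManagerEvents audioManagerEvents;
  private int savedAudioMode = AudioManager.MODE_INVALID;

  public static RTCAudioManager create(Context context) {
    return new RTCAudioManager(context);
  }

  private RTCAudioManager(Context context) {
    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
  }

  // Store the existing audio mode and switch to MODE_IN_COMMUNICATION for VoIP.
  public void start(AudioManagerEvents events) {
    audioManagerEvents = events;
    savedAudioMode = audioManager.getMode();
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
  }

  // Restore whatever audio mode was active before start().
  public void stop() {
    audioManager.setMode(savedAudioMode);
    audioManagerEvents = null;
  }

  // Route audio to the requested device and notify the registered listener.
  public void selectAudioDevice(AudioDevice device) {
    audioManager.setSpeakerphoneOn(device == AudioDevice.SPEAKER_PHONE);
    if (audioManagerEvents != null) {
      audioManagerEvents.onAudioDeviceChanged(device, Collections.singleton(device));
    }
  }
}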
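
The new onAudioManagerDevicesChanged() only logs for now ("TODO(henrika): add callback handler"). One hypothetical way to close that TODO would be to forward the event to Dart over the plugin's MethodChannel. In the sketch below, the channel field is assumed to hold the MethodChannel passed to the constructor shown in the hunk above, and the "onAudioDeviceChanged" method name and payload keys are made up for illustration; none of this is defined by this commit.

// Hypothetical drop-in body for the method above, inside FlutterWebRTCPlugin.
// Assumes `channel` is the MethodChannel handed to the plugin's constructor.
private void onAudioManagerDevicesChanged(
    final RTCAudioManager.AudioDevice device, final Set<RTCAudioManager.AudioDevice> availableDevices) {
  Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", selected: " + device);
  Map<String, Object> event = new HashMap<>();   // java.util.Map / java.util.HashMap
  event.put("selected", device.name());
  event.put("available", availableDevices.toString());
  // "onAudioDeviceChanged" is a made-up method name; a Dart-side
  // MethodCallHandler would have to be registered to match it.
  channel.invokeMethod("onAudioDeviceChanged", event);
}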