diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4f067a20dd..409eda5c68 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,7 @@
 # Changelog
 
 --------------------------------------------
+
 [0.9.3] - 2022-08-15
 
 * [Windows/macOS] Fix UI freeze when getting thumbnails.
@@ -11,6 +12,11 @@
 * [iOS/macOS] update WebRTC-SDK to 104.5112.02.
 * [Windows] update libwebrtc.dll to 104.5112.02.
 
+[0.9.1] - 2022-08-01
+
+* [iOS] Fix: iOS app could not change camera resolutions, caused by a wrong datatype in the video constraints.
+* [Darwin] Bump version for .podspec.
+
 [0.9.0] - 2022-07-27
 
 * [macOS] Added screen-sharing support for macOS
diff --git a/android/build.gradle b/android/build.gradle
index 51383e7371..5c0c1f21fb 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -50,6 +50,7 @@ android {
 
 dependencies {
     implementation 'com.github.webrtc-sdk:android:104.5112.01'
+    implementation "com.twilio:audioswitch:1.1.5"
     implementation 'androidx.annotation:annotation:1.1.0'
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
 }
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java
index 0a27978579..41f61f3ba3 100755
--- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java
@@ -11,7 +11,6 @@
 
 import org.webrtc.EglBase;
 import org.webrtc.MediaStream;
-import org.webrtc.MediaStreamTrack;
 import org.webrtc.RendererCommon.RendererEvents;
 import org.webrtc.VideoTrack;
 
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
index 56690c4df4..3d89388ceb 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
@@ -5,37 +5,41 @@
 import android.content.Context;
 import android.os.Bundle;
 import android.util.Log;
+
 import androidx.annotation.NonNull;
 import androidx.lifecycle.DefaultLifecycleObserver;
 import androidx.lifecycle.Lifecycle;
 import androidx.lifecycle.LifecycleOwner;
-import com.cloudwebrtc.webrtc.MethodCallHandlerImpl.AudioManager;
-import com.cloudwebrtc.webrtc.utils.RTCAudioManager;
-import io.flutter.embedding.android.FlutterActivity;
+
+import com.cloudwebrtc.webrtc.audio.AudioSwitchManager;
+import com.cloudwebrtc.webrtc.utils.AnyThreadSink;
+import com.cloudwebrtc.webrtc.utils.ConstraintsMap;
+
 import io.flutter.embedding.engine.plugins.FlutterPlugin;
 import io.flutter.embedding.engine.plugins.activity.ActivityAware;
 import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
 import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference;
 import io.flutter.plugin.common.BinaryMessenger;
+import io.flutter.plugin.common.EventChannel;
 import io.flutter.plugin.common.MethodChannel;
 import io.flutter.plugin.common.PluginRegistry.Registrar;
 import io.flutter.view.TextureRegistry;
 
-import java.util.Set;
-
 /**
  * FlutterWebRTCPlugin
 */
-public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware {
+public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventChannel.StreamHandler {
 
   static public final String TAG = "FlutterWebRTCPlugin";
   private static Application application;
 
-  private RTCAudioManager rtcAudioManager;
-  private MethodChannel channel;
+  private AudioSwitchManager audioSwitchManager;
+  private MethodChannel methodChannel;
   private MethodCallHandlerImpl methodCallHandler;
   private LifeCycleObserver observer;
   private Lifecycle lifecycle;
+  private EventChannel eventChannel;
+  public EventChannel.EventSink eventSink;
 
   public FlutterWebRTCPlugin() {
   }
@@ -103,62 +107,48 @@ public void onDetachedFromActivity() {
 
   private void startListening(final Context context, BinaryMessenger messenger,
       TextureRegistry textureRegistry) {
+    audioSwitchManager = new AudioSwitchManager(context);
     methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry,
-        new AudioManager() {
-          @Override
-          public void onAudioManagerRequested(boolean requested) {
-            if (requested) {
-              if (rtcAudioManager == null) {
-                rtcAudioManager = RTCAudioManager.create(context);
-              }
-              rtcAudioManager.start(FlutterWebRTCPlugin.this::onAudioManagerDevicesChanged);
-            } else {
-              if (rtcAudioManager != null) {
-                rtcAudioManager.stop();
-                rtcAudioManager = null;
-              }
-            }
-          }
-
-          @Override
-          public void setMicrophoneMute(boolean mute) {
-            if (rtcAudioManager != null) {
-              rtcAudioManager.setMicrophoneMute(mute);
-            }
-          }
-
-          @Override
-          public void setSpeakerphoneOn(boolean on) {
-            if (rtcAudioManager != null) {
-              rtcAudioManager.setSpeakerphoneOn(on);
-            }
-          }
-        });
-
-    channel = new MethodChannel(messenger, "FlutterWebRTC.Method");
-    channel.setMethodCallHandler(methodCallHandler);
+        audioSwitchManager);
+    methodChannel = new MethodChannel(messenger, "FlutterWebRTC.Method");
+    methodChannel.setMethodCallHandler(methodCallHandler);
+    eventChannel = new EventChannel(messenger, "FlutterWebRTC.Event");
+    eventChannel.setStreamHandler(this);
+    audioSwitchManager.audioDeviceChangeListener = (devices, currentDevice) -> {
+      Log.w(TAG, "audioDeviceChangeListener " + devices + " " + currentDevice);
+      ConstraintsMap params = new ConstraintsMap();
+      params.putString("event", "onDeviceChange");
+      sendEvent(params.toMap());
+      return null;
+    };
+    audioSwitchManager.start();
   }
 
   private void stopListening() {
     methodCallHandler.dispose();
     methodCallHandler = null;
-    channel.setMethodCallHandler(null);
-
-    if (rtcAudioManager != null) {
+    methodChannel.setMethodCallHandler(null);
+    eventChannel.setStreamHandler(null);
+    if (audioSwitchManager != null) {
       Log.d(TAG, "Stopping the audio manager...");
-      rtcAudioManager.stop();
-      rtcAudioManager = null;
+      audioSwitchManager.stop();
+      audioSwitchManager = null;
     }
   }
 
-  // This method is called when the audio manager reports audio device change,
-  // e.g. from wired headset to speakerphone.
-  private void onAudioManagerDevicesChanged(
-      final RTCAudioManager.AudioDevice device,
-      final Set<RTCAudioManager.AudioDevice> availableDevices) {
-    Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
-        + "selected: " + device);
-    // TODO(henrika): add callback handler.
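Note on the event plumbing: the `onListen` override added just below wraps the incoming sink in `AnyThreadSink` before storing it in `eventSink`. `AnyThreadSink` is imported above but not defined in this diff; a minimal sketch of such a wrapper, assuming its only job is to marshal sink calls onto the main (platform) thread, could look like this:

```java
// Hypothetical sketch - the real AnyThreadSink lives elsewhere in the plugin sources.
import android.os.Handler;
import android.os.Looper;

import io.flutter.plugin.common.EventChannel;

public class AnyThreadSink implements EventChannel.EventSink {
  private final EventChannel.EventSink sink;
  private final Handler handler = new Handler(Looper.getMainLooper());

  public AnyThreadSink(EventChannel.EventSink sink) {
    this.sink = sink;
  }

  @Override
  public void success(Object event) {
    // EventSink methods must run on the platform (main) thread.
    handler.post(() -> sink.success(event));
  }

  @Override
  public void error(String code, String message, Object details) {
    handler.post(() -> sink.error(code, message, details));
  }

  @Override
  public void endOfStream() {
    handler.post(sink::endOfStream);
  }
}
```

This matters here because `audioDeviceChangeListener` fires from AudioSwitch callbacks, while Flutter event sinks must be invoked on the platform thread.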
+  @Override
+  public void onListen(Object arguments, EventChannel.EventSink events) {
+    eventSink = new AnyThreadSink(events);
+  }
+
+  @Override
+  public void onCancel(Object arguments) {
+    eventSink = null;
+  }
+
+  public void sendEvent(Object event) {
+    if (eventSink != null) {
+      eventSink.success(event);
+    }
   }
 
   private class LifeCycleObserver implements Application.ActivityLifecycleCallbacks, DefaultLifecycleObserver {
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
index f5203f57f1..46e0b5438b 100755
--- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
@@ -1065,10 +1065,10 @@ public class VideoCapturerInfo {
   }
 
   @RequiresApi(api = VERSION_CODES.M)
-  void setPreferredInputDevice(int i){
+  void setPreferredInputDevice(int i) {
     android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE));
     final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS);
-    if (devices.length>i){
+    if (devices.length > i) {
       audioDeviceModule.setPreferredInputDevice(devices[i]);
     }
   }
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
index 4d6f477512..921c50ee5a 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
@@ -18,6 +18,8 @@
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 
+import com.cloudwebrtc.webrtc.audio.AudioDeviceKind;
+import com.cloudwebrtc.webrtc.audio.AudioSwitchManager;
 import com.cloudwebrtc.webrtc.record.AudioChannel;
 import com.cloudwebrtc.webrtc.record.FrameCapturer;
 import com.cloudwebrtc.webrtc.utils.AnyThreadResult;
@@ -28,6 +30,8 @@
 import com.cloudwebrtc.webrtc.utils.ObjectType;
 import com.cloudwebrtc.webrtc.utils.PermissionUtils;
 
+import com.twilio.audioswitch.AudioDevice;
+
 import org.webrtc.AudioTrack;
 import org.webrtc.CryptoOptions;
 import org.webrtc.DefaultVideoDecoderFactory;
@@ -81,17 +85,6 @@
 import io.flutter.view.TextureRegistry.SurfaceTextureEntry;
 
 public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
-  interface AudioManager {
-
-    void onAudioManagerRequested(boolean requested);
-
-    void setMicrophoneMute(boolean mute);
-
-    void setSpeakerphoneOn(boolean on);
-
-  }
-
   static public final String TAG = "FlutterWebRTCPlugin";
 
   private final Map<String, PeerConnectionObserver> mPeerConnectionObservers = new HashMap<>();
@@ -112,18 +105,18 @@ interface AudioManager {
    */
   private GetUserMediaImpl getUserMediaImpl;
 
-  private final AudioManager audioManager;
+  private final AudioSwitchManager audioSwitchManager;
 
   private AudioDeviceModule audioDeviceModule;
 
   private Activity activity;
 
   MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry,
-      @NonNull AudioManager audioManager) {
+      @NonNull AudioSwitchManager audioManager) {
     this.context = context;
     this.textures = textureRegistry;
     this.messenger = messenger;
-    this.audioManager = audioManager;
+    this.audioSwitchManager = audioManager;
   }
 
   static private void resultError(String method, String error, Result result) {
@@ -472,14 +465,29 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         result.success(null);
         break;
       }
+      case "selectAudioOutput": {
"selectAudioOutput": { + String deviceId = call.argument("deviceId"); + audioSwitchManager.selectAudioOutput(AudioDeviceKind.fromTypeName(deviceId)); + result.success(null); + break; + } case "setMicrophoneMute": boolean mute = call.argument("mute"); - audioManager.setMicrophoneMute(mute); + audioSwitchManager.setMicrophoneMute(mute); result.success(null); break; + case "selectAudioInput": + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { + String deviceId = call.argument("deviceId"); + getUserMediaImpl.setPreferredInputDevice(Integer.parseInt(deviceId)); + result.success(null); + } else { + result.notImplemented(); + } + break; case "enableSpeakerphone": boolean enable = call.argument("enable"); - audioManager.setSpeakerphoneOn(enable); + audioSwitchManager.enableSpeakerphone(enable); result.success(null); break; case "getDisplayMedia": { @@ -980,14 +988,14 @@ public String peerConnectionInit(ConstraintsMap configuration, ConstraintsMap co if (mPeerConnectionObservers.size() == 0) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S || context.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.S) { - audioManager.onAudioManagerRequested(true); + //audioSwitchManager.start(); } else { ArrayList permissions = new ArrayList<>(); permissions.add(Manifest.permission.BLUETOOTH_CONNECT); requestPermissions( permissions, (args) -> { - audioManager.onAudioManagerRequested(true); + //audioSwitchManager.start(); }, (args) -> { }); @@ -1143,22 +1151,36 @@ public void getSources(Result result) { audio.putString("kind", "audioinput"); array.pushMap(audio); } else { - android.media.AudioManager audioManager = ((android.media.AudioManager) context.getSystemService(Context.AUDIO_SERVICE)); + android.media.AudioManager audioManager = ((android.media.AudioManager) context + .getSystemService(Context.AUDIO_SERVICE)); final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); - for (int i=0;i audioOutputs = audioSwitchManager.availableAudioDevices(); + + for (AudioDevice audioOutput : audioOutputs) { + ConstraintsMap audioOutputMap = new ConstraintsMap(); + audioOutputMap.putString("label", audioOutput.getName()); + audioOutputMap.putString("deviceId", AudioDeviceKind.fromAudioDevice(audioOutput).typeName); + audioOutputMap.putString("facing", ""); + audioOutputMap.putString("kind", "audiooutput"); + array.pushMap(audioOutputMap); + } ConstraintsMap map = new ConstraintsMap(); map.putArray("sources", array.toArrayList()); @@ -1545,7 +1567,7 @@ public void peerConnectionDispose(final String id) { mPeerConnectionObservers.remove(id); } if (mPeerConnectionObservers.size() == 0) { - audioManager.onAudioManagerRequested(false); + //audioSwitchManager.stop(); } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java index 633ddf3af7..aa3f0df38f 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java @@ -1,7 +1,6 @@ package com.cloudwebrtc.webrtc; import android.util.Log; -import android.util.SparseArray; import androidx.annotation.Nullable; import com.cloudwebrtc.webrtc.utils.AnyThreadSink; import com.cloudwebrtc.webrtc.utils.ConstraintsArray; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt index 
--- a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt
@@ -1,6 +1,5 @@
 package com.cloudwebrtc.webrtc
 
-import android.util.Log
 import org.webrtc.*
 import java.util.concurrent.Callable
 import java.util.concurrent.ExecutorService
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java
new file mode 100644
index 0000000000..b86f5ecef7
--- /dev/null
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java
@@ -0,0 +1,40 @@
+package com.cloudwebrtc.webrtc.audio;
+
+import androidx.annotation.Nullable;
+
+import com.twilio.audioswitch.AudioDevice;
+
+public enum AudioDeviceKind {
+    BLUETOOTH("bluetooth", AudioDevice.BluetoothHeadset.class),
+    WIRED_HEADSET("headset", AudioDevice.WiredHeadset.class),
+    SPEAKER("speaker", AudioDevice.Speakerphone.class),
+    EARPIECE("earpiece", AudioDevice.Earpiece.class);
+
+    public final String typeName;
+    public final Class<? extends AudioDevice> audioDeviceClass;
+
+    AudioDeviceKind(String typeName, Class<? extends AudioDevice> audioDeviceClass) {
+        this.typeName = typeName;
+        this.audioDeviceClass = audioDeviceClass;
+    }
+
+    @Nullable
+    public static AudioDeviceKind fromAudioDevice(AudioDevice audioDevice) {
+        for (AudioDeviceKind kind : values()) {
+            if (kind.audioDeviceClass.equals(audioDevice.getClass())) {
+                return kind;
+            }
+        }
+        return null;
+    }
+
+    @Nullable
+    public static AudioDeviceKind fromTypeName(String typeName) {
+        for (AudioDeviceKind kind : values()) {
+            if (kind.typeName.equals(typeName)) {
+                return kind;
+            }
+        }
+        return null;
+    }
+}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java
new file mode 100644
index 0000000000..61efdbdc64
--- /dev/null
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java
@@ -0,0 +1,140 @@
+package com.cloudwebrtc.webrtc.audio;
+
+import android.content.Context;
+import android.media.AudioManager;
+import android.os.Handler;
+import android.os.Looper;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.twilio.audioswitch.AudioDevice;
+import com.twilio.audioswitch.AudioSwitch;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import kotlin.Unit;
+import kotlin.jvm.functions.Function2;
+
+public class AudioSwitchManager {
+    @NonNull
+    private final Context context;
+    @NonNull
+    private final AudioManager audioManager;
+
+    public boolean loggingEnabled;
+    @NonNull
+    public Function2<
+            ? super List<? extends AudioDevice>,
+            ? super AudioDevice,
+            Unit> audioDeviceChangeListener = (devices, currentDevice) -> null;
+
+    @NonNull
+    public AudioManager.OnAudioFocusChangeListener audioFocusChangeListener = (i -> {});
+
+    @NonNull
+    public List<Class<? extends AudioDevice>> preferredDeviceList;
+
+    // AudioSwitch is not threadsafe, so all calls should be done on the main thread.
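The comment above is the key constraint for everything that follows: AudioSwitch is confined to the main thread, so `AudioSwitchManager` funnels every call through a `Handler` bound to the main `Looper`. The same pattern in isolation (illustrative only, not part of the patch):

```java
import android.os.Handler;
import android.os.Looper;

// Serialize all access to a non-thread-safe object onto the main thread.
final class MainThreadFunnel {
  private final Handler handler = new Handler(Looper.getMainLooper());

  // Safe to call from any thread; the work itself always runs on the main looper.
  void run(Runnable work) {
    if (Looper.myLooper() == Looper.getMainLooper()) {
      work.run(); // already on the main thread, run inline
    } else {
      handler.post(work);
    }
  }
}
```

`start()`/`stop()` below additionally call `removeCallbacksAndMessages(null)` first, so a still-queued `start` can be cancelled by a quick `stop` (and vice versa) before it ever runs.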
+    private final Handler handler = new Handler(Looper.getMainLooper());
+
+    @Nullable
+    private AudioSwitch audioSwitch;
+
+    public AudioSwitchManager(@NonNull Context context) {
+        this.context = context;
+        this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+
+        preferredDeviceList = new ArrayList<>();
+        preferredDeviceList.add(AudioDevice.BluetoothHeadset.class);
+        preferredDeviceList.add(AudioDevice.WiredHeadset.class);
+        preferredDeviceList.add(AudioDevice.Speakerphone.class);
+        preferredDeviceList.add(AudioDevice.Earpiece.class);
+    }
+
+    public void start() {
+        if (audioSwitch == null) {
+            handler.removeCallbacksAndMessages(null);
+            handler.postAtFrontOfQueue(() -> {
+                audioSwitch = new AudioSwitch(
+                        context,
+                        loggingEnabled,
+                        audioFocusChangeListener,
+                        preferredDeviceList
+                );
+                audioSwitch.start(audioDeviceChangeListener);
+                audioSwitch.activate();
+            });
+        }
+    }
+
+    public void stop() {
+        handler.removeCallbacksAndMessages(null);
+        handler.postAtFrontOfQueue(() -> {
+            if (audioSwitch != null) {
+                audioSwitch.stop();
+            }
+            audioSwitch = null;
+        });
+    }
+
+    public void setMicrophoneMute(boolean mute) {
+        audioManager.setMicrophoneMute(mute);
+    }
+
+    @Nullable
+    public AudioDevice selectedAudioDevice() {
+        AudioSwitch audioSwitchTemp = audioSwitch;
+        if (audioSwitchTemp != null) {
+            return audioSwitchTemp.getSelectedAudioDevice();
+        } else {
+            return null;
+        }
+    }
+
+    @NonNull
+    public List<AudioDevice> availableAudioDevices() {
+        AudioSwitch audioSwitchTemp = audioSwitch;
+        if (audioSwitchTemp != null) {
+            return audioSwitchTemp.getAvailableAudioDevices();
+        } else {
+            return Collections.emptyList();
+        }
+    }
+
+    public void selectAudioOutput(@NonNull Class<? extends AudioDevice> audioDeviceClass) {
+        handler.post(() -> {
+            if (audioSwitch != null) {
+                List<AudioDevice> devices = availableAudioDevices();
+                AudioDevice audioDevice = null;
+
+                for (AudioDevice device : devices) {
+                    if (device.getClass().equals(audioDeviceClass)) {
+                        audioDevice = device;
+                        break;
+                    }
+                }
+
+                if (audioDevice != null) {
+                    audioSwitch.selectDevice(audioDevice);
+                }
+            }
+        });
+    }
+
+    public void enableSpeakerphone(boolean enable) {
+        audioManager.setSpeakerphoneOn(enable);
+    }
+
+    public void selectAudioOutput(@Nullable AudioDeviceKind kind) {
+        if (kind != null) {
+            selectAudioOutput(kind.audioDeviceClass);
+        }
+    }
+}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java
deleted file mode 100644
index 475059116d..0000000000
--- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCAudioManager.java
+++ /dev/null
@@ -1,598 +0,0 @@
-/*
- * Copyright 2014 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */ - -package com.cloudwebrtc.webrtc.utils; - -import android.content.BroadcastReceiver; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.content.SharedPreferences; -import android.content.pm.PackageManager; -import android.media.AudioDeviceInfo; -import android.media.AudioManager; -import android.os.Build; -import android.preference.PreferenceManager; -import android.util.Log; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import com.cloudwebrtc.webrtc.utils.RTCUtils; -import org.webrtc.ThreadUtils; - -/** - * RTCAudioManager manages all audio related parts of the plugin. - */ -public class RTCAudioManager { - private static final String TAG = "RTCAudioManager"; - private static final String SPEAKERPHONE_AUTO = "auto"; - private static final String SPEAKERPHONE_TRUE = "true"; - private static final String SPEAKERPHONE_FALSE = "false"; - - /** - * AudioDevice is the names of possible audio devices that we currently - * support. - */ - public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE } - - /** AudioManager state. */ - public enum AudioManagerState { - UNINITIALIZED, - PREINITIALIZED, - RUNNING, - } - - /** Selected audio device change event. */ - public interface AudioManagerEvents { - // Callback fired once audio device is changed or list of available audio devices changed. - void onAudioDeviceChanged( - AudioDevice selectedAudioDevice, Set availableAudioDevices); - } - - private final Context appContext; - private AudioManager audioManager; - - private AudioManagerEvents audioManagerEvents; - private AudioManagerState amState; - private int savedAudioMode = AudioManager.MODE_INVALID; - private boolean savedIsSpeakerPhoneOn; - private boolean savedIsMicrophoneMute; - private boolean hasWiredHeadset; - - // Default audio device; speaker phone for video calls or earpiece for audio - // only calls. - private AudioDevice defaultAudioDevice; - - // Contains the currently selected audio device. - // This device is changed automatically using a certain scheme where e.g. - // a wired headset "wins" over speaker phone. It is also possible for a - // user to explicitly select a device (and overrid any predefined scheme). - // See |userSelectedAudioDevice| for details. - private AudioDevice selectedAudioDevice; - - // Contains the user-selected audio device which overrides the predefined - // selection scheme. - // TODO(henrika): always set to AudioDevice.NONE today. Add support for - // explicit selection based on choice by userSelectedAudioDevice. - private AudioDevice userSelectedAudioDevice; - - // Contains speakerphone setting: auto, true or false - private final String useSpeakerphone; - - // Proximity sensor object. It measures the proximity of an object in cm - // relative to the view screen of a device and can therefore be used to - // assist device switching (close to ear <=> use headset earpiece if - // available, far from ear <=> use speaker phone). - private RTCProximitySensor proximitySensor; - - // Handles all tasks related to Bluetooth headset devices. - private final RTCBluetoothManager bluetoothManager; - - // Contains a list of available audio devices. A Set collection is used to - // avoid duplicate elements. - private Set audioDevices = new HashSet<>(); - - // Broadcast receiver for wired headset intent broadcasts. - private BroadcastReceiver wiredHeadsetReceiver; - - // Callback method for changes in audio focus. 
- - private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener; - - /** - * This method is called when the proximity sensor reports a state change, - * e.g. from "NEAR to FAR" or from "FAR to NEAR". - */ - private void onProximitySensorChangedState() { - if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) { - return; - } - - // The proximity sensor should only be activated when there are exactly two - // available audio devices. - if (audioDevices.size() == 2 && audioDevices.contains(RTCAudioManager.AudioDevice.EARPIECE) - && audioDevices.contains(RTCAudioManager.AudioDevice.SPEAKER_PHONE)) { - if (proximitySensor.sensorReportsNearState()) { - // Sensor reports that a "handset is being held up to a person's ear", - // or "something is covering the light sensor". - setAudioDeviceInternal(RTCAudioManager.AudioDevice.EARPIECE); - } else { - // Sensor reports that a "handset is removed from a person's ear", or - // "the light sensor is no longer covered". - setAudioDeviceInternal(RTCAudioManager.AudioDevice.SPEAKER_PHONE); - } - } - } - - /* Receiver which handles changes in wired headset availability. */ - private class WiredHeadsetReceiver extends BroadcastReceiver { - private static final int STATE_UNPLUGGED = 0; - private static final int STATE_PLUGGED = 1; - private static final int HAS_NO_MIC = 0; - private static final int HAS_MIC = 1; - - @Override - public void onReceive(Context context, Intent intent) { - int state = intent.getIntExtra("state", STATE_UNPLUGGED); - int microphone = intent.getIntExtra("microphone", HAS_NO_MIC); - String name = intent.getStringExtra("name"); - Log.d(TAG, "WiredHeadsetReceiver.onReceive" + RTCUtils.getThreadInfo() + ": " - + "a=" + intent.getAction() + ", s=" - + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m=" - + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb=" - + isInitialStickyBroadcast()); - hasWiredHeadset = (state == STATE_PLUGGED); - updateAudioDeviceState(); - } - } - - /** Construction. */ - public static RTCAudioManager create(Context context) { - return new RTCAudioManager(context); - } - - private RTCAudioManager(Context context) { - Log.d(TAG, "ctor"); - ThreadUtils.checkIsOnMainThread(); - appContext = context; - audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE)); - bluetoothManager = RTCBluetoothManager.create(context, this); - wiredHeadsetReceiver = new WiredHeadsetReceiver(); - amState = AudioManagerState.UNINITIALIZED; - - useSpeakerphone = SPEAKERPHONE_AUTO; - - Log.d(TAG, "useSpeakerphone: " + useSpeakerphone); - if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) { - defaultAudioDevice = AudioDevice.EARPIECE; - } else { - defaultAudioDevice = AudioDevice.SPEAKER_PHONE; - } - - // Create and initialize the proximity sensor. - // Tablet devices (e.g. Nexus 7) does not support proximity sensors. - // Note that, the sensor will not be active until start() has been called. - proximitySensor = RTCProximitySensor.create(context, - // This method will be called each time a state change is detected. - // Example: user holds his hand over the device (closer than ~5 cm), - // or removes his hand from the device. - this ::onProximitySensorChangedState); - - Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice); - RTCUtils.logDeviceInfo(TAG); - } - - @SuppressWarnings("deprecation") // TODO(henrika): audioManager.requestAudioFocus() is deprecated. 
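The deprecation flagged in the TODO above refers to the stream-based `requestAudioFocus(listener, streamType, durationHint)` overload used in the deleted `start()` method below. On API 26+ its replacement is the `AudioFocusRequest` variant; a hedged sketch of the equivalent request, shown only for context (the helper class and method names here are illustrative, not part of this patch):

```java
import android.media.AudioAttributes;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
import android.os.Build;

import androidx.annotation.RequiresApi;

final class FocusExample {
  @RequiresApi(api = Build.VERSION_CODES.O)
  static AudioFocusRequest requestVoiceCallFocus(
      AudioManager audioManager, AudioManager.OnAudioFocusChangeListener listener) {
    AudioAttributes attributes = new AudioAttributes.Builder()
        .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
        .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
        .build();
    AudioFocusRequest request =
        new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT)
            .setAudioAttributes(attributes)
            .setOnAudioFocusChangeListener(listener)
            .build();
    // Returns AUDIOFOCUS_REQUEST_GRANTED, _FAILED, or _DELAYED.
    audioManager.requestAudioFocus(request);
    return request; // keep a reference for abandonAudioFocusRequest(request)
  }
}
```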
- public void start(AudioManagerEvents audioManagerEvents) { - Log.d(TAG, "start"); - ThreadUtils.checkIsOnMainThread(); - if (amState == AudioManagerState.RUNNING) { - Log.e(TAG, "AudioManager is already active"); - return; - } - // TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED. - - Log.d(TAG, "AudioManager starts..."); - this.audioManagerEvents = audioManagerEvents; - amState = AudioManagerState.RUNNING; - - // Store current audio state so we can restore it when stop() is called. - savedAudioMode = audioManager.getMode(); - savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn(); - savedIsMicrophoneMute = audioManager.isMicrophoneMute(); - hasWiredHeadset = hasWiredHeadset(); - - // Create an AudioManager.OnAudioFocusChangeListener instance. - audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() { - // Called on the listener to notify if the audio focus for this listener has been changed. - // The |focusChange| value indicates whether the focus was gained, whether the focus was lost, - // and whether that loss is transient, or whether the new focus holder will hold it for an - // unknown amount of time. - // TODO(henrika): possibly extend support of handling audio-focus changes. Only contains - // logging for now. - @Override - public void onAudioFocusChange(int focusChange) { - final String typeOfChange; - switch (focusChange) { - case AudioManager.AUDIOFOCUS_GAIN: - typeOfChange = "AUDIOFOCUS_GAIN"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE"; - break; - case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK: - typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK"; - break; - case AudioManager.AUDIOFOCUS_LOSS: - typeOfChange = "AUDIOFOCUS_LOSS"; - break; - case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT: - typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT"; - break; - case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK: - typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK"; - break; - default: - typeOfChange = "AUDIOFOCUS_INVALID"; - break; - } - Log.d(TAG, "onAudioFocusChange: " + typeOfChange); - } - }; - - // Request audio playout focus (without ducking) and install listener for changes in focus. - int result = audioManager.requestAudioFocus(audioFocusChangeListener, - AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); - if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) { - Log.d(TAG, "Audio focus request granted for VOICE_CALL streams"); - } else { - Log.e(TAG, "Audio focus request failed"); - } - - // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is - // required to be in this mode when playout and/or recording starts for - // best possible VoIP performance. - audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); - - // Always disable microphone mute during a WebRTC call. - setMicrophoneMute(false); - - // Set initial device states. - userSelectedAudioDevice = AudioDevice.NONE; - selectedAudioDevice = AudioDevice.NONE; - audioDevices.clear(); - - // Initialize and start Bluetooth if a BT device is available or initiate - // detection of new (enabled) BT devices. - bluetoothManager.start(); - - // Do initial selection of audio device. This setting can later be changed - // either by adding/removing a BT or wired headset or by covering/uncovering - // the proximity sensor. 
- updateAudioDeviceState(); - - // Register receiver for broadcast intents related to adding/removing a - // wired headset. - registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG)); - Log.d(TAG, "AudioManager started"); - } - - @SuppressWarnings("deprecation") // TODO(henrika): audioManager.abandonAudioFocus() is deprecated. - public void stop() { - Log.d(TAG, "stop"); - ThreadUtils.checkIsOnMainThread(); - if (amState != AudioManagerState.RUNNING) { - Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState); - return; - } - amState = AudioManagerState.UNINITIALIZED; - - unregisterReceiver(wiredHeadsetReceiver); - - bluetoothManager.stop(); - - // Restore previously stored audio states. - setSpeakerphoneOn(savedIsSpeakerPhoneOn); - setMicrophoneMute(savedIsMicrophoneMute); - audioManager.setMode(savedAudioMode); - - // Abandon audio focus. Gives the previous focus owner, if any, focus. - audioManager.abandonAudioFocus(audioFocusChangeListener); - audioFocusChangeListener = null; - Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams"); - - if (proximitySensor != null) { - proximitySensor.stop(); - proximitySensor = null; - } - - audioManagerEvents = null; - Log.d(TAG, "AudioManager stopped"); - } - - /** Changes selection of the currently active audio device. */ - private void setAudioDeviceInternal(AudioDevice device) { - Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")"); - RTCUtils.assertIsTrue(audioDevices.contains(device)); - - switch (device) { - case SPEAKER_PHONE: - setSpeakerphoneOn(true); - break; - case EARPIECE: - setSpeakerphoneOn(false); - break; - case WIRED_HEADSET: - setSpeakerphoneOn(false); - break; - case BLUETOOTH: - setSpeakerphoneOn(false); - break; - default: - Log.e(TAG, "Invalid audio device selection"); - break; - } - selectedAudioDevice = device; - } - - /** - * Changes default audio device. - * TODO(henrika): add usage of this method in the RTCMobile client. - */ - public void setDefaultAudioDevice(AudioDevice defaultDevice) { - ThreadUtils.checkIsOnMainThread(); - switch (defaultDevice) { - case EARPIECE: - if (hasEarpiece()) { - defaultAudioDevice = defaultDevice; - } else { - defaultAudioDevice = AudioDevice.SPEAKER_PHONE; - } - break; - case SPEAKER_PHONE: - defaultAudioDevice = defaultDevice; - break; - default: - Log.e(TAG, "Invalid default audio device selection"); - break; - } - Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")"); - updateAudioDeviceState(); - } - - /** Changes selection of the currently active audio device. */ - public void selectAudioDevice(AudioDevice device) { - ThreadUtils.checkIsOnMainThread(); - if (!audioDevices.contains(device)) { - Log.e(TAG, "Can not select " + device + " from available " + audioDevices); - } - userSelectedAudioDevice = device; - updateAudioDeviceState(); - } - - /** Returns current set of available/selectable audio devices. */ - public Set getAudioDevices() { - ThreadUtils.checkIsOnMainThread(); - return Collections.unmodifiableSet(new HashSet<>(audioDevices)); - } - - /** Returns the currently selected audio device. */ - public AudioDevice getSelectedAudioDevice() { - ThreadUtils.checkIsOnMainThread(); - return selectedAudioDevice; - } - - /** Helper method for receiver registration. */ - private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) { - appContext.registerReceiver(receiver, filter); - } - - /** Helper method for unregistration of an existing receiver. 
*/ - private void unregisterReceiver(BroadcastReceiver receiver) { - appContext.unregisterReceiver(receiver); - } - - /** Sets the speaker phone mode. */ - public void setSpeakerphoneOn(boolean on) { - boolean wasOn = audioManager.isSpeakerphoneOn(); - if (wasOn == on) { - return; - } - final RTCBluetoothManager.State btManagerState = bluetoothManager.getState(); - final boolean isBTAvailable = - btManagerState == RTCBluetoothManager.State.SCO_CONNECTED - || btManagerState == RTCBluetoothManager.State.SCO_CONNECTING - || btManagerState == RTCBluetoothManager.State.HEADSET_AVAILABLE; - if(!on && isBTAvailable){ - bluetoothManager.startScoAudio(); - } - audioManager.setSpeakerphoneOn(on); - } - - /** Sets the microphone mute state. */ - public void setMicrophoneMute(boolean on) { - boolean wasMuted = audioManager.isMicrophoneMute(); - if (wasMuted == on) { - return; - } - audioManager.setMicrophoneMute(on); - } - - /** Gets the current earpiece state. */ - private boolean hasEarpiece() { - return appContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY); - } - - /** - * Checks whether a wired headset is connected or not. - * This is not a valid indication that audio playback is actually over - * the wired headset as audio routing depends on other conditions. We - * only use it as an early indicator (during initialization) of an attached - * wired headset. - */ - @Deprecated - private boolean hasWiredHeadset() { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { - return audioManager.isWiredHeadsetOn(); - } else { - final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL); - for (AudioDeviceInfo device : devices) { - final int type = device.getType(); - if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) { - Log.d(TAG, "hasWiredHeadset: found wired headset"); - return true; - } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) { - Log.d(TAG, "hasWiredHeadset: found USB audio device"); - return true; - } - } - return false; - } - } - - /** - * Updates list of possible audio devices and make new device selection. - * TODO(henrika): add unit test to verify all state transitions. - */ - public void updateAudioDeviceState() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "--- updateAudioDeviceState: " - + "wired headset=" + hasWiredHeadset + ", " - + "BT state=" + bluetoothManager.getState()); - Log.d(TAG, "Device status: " - + "available=" + audioDevices + ", " - + "selected=" + selectedAudioDevice + ", " - + "user selected=" + userSelectedAudioDevice); - - // Check if any Bluetooth headset is connected. The internal BT state will - // change accordingly. - // TODO(henrika): perhaps wrap required state into BT manager. - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_UNAVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_DISCONNECTING) { - bluetoothManager.updateDevice(); - } - - // Update the set of available audio devices. - Set newAudioDevices = new HashSet<>(); - - if (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING - || bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE) { - newAudioDevices.add(AudioDevice.BLUETOOTH); - } - - if (hasWiredHeadset) { - // If a wired headset is connected, then it is the only possible option. 
- newAudioDevices.add(AudioDevice.WIRED_HEADSET); - } else { - // No wired headset, hence the audio-device list can contain speaker - // phone (on a tablet), or speaker phone and earpiece (on mobile phone). - newAudioDevices.add(AudioDevice.SPEAKER_PHONE); - if (hasEarpiece()) { - newAudioDevices.add(AudioDevice.EARPIECE); - } - } - // Store state which is set to true if the device list has changed. - boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices); - // Update the existing audio device set. - audioDevices = newAudioDevices; - // Correct user selected audio devices if needed. - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_UNAVAILABLE - && userSelectedAudioDevice == AudioDevice.BLUETOOTH) { - // If BT is not available, it can't be the user selection. - userSelectedAudioDevice = AudioDevice.NONE; - } - if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) { - // If user selected speaker phone, but then plugged wired headset then make - // wired headset as user selected device. - userSelectedAudioDevice = AudioDevice.WIRED_HEADSET; - } - if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) { - // If user selected wired headset, but then unplugged wired headset then make - // speaker phone as user selected device. - userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE; - } - - // Need to start Bluetooth if it is available and user either selected it explicitly or - // user did not select any output device. - boolean needBluetoothAudioStart = - bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - && (userSelectedAudioDevice == AudioDevice.NONE - || userSelectedAudioDevice == AudioDevice.BLUETOOTH); - - // Need to stop Bluetooth audio if user selected different device and - // Bluetooth SCO connection is established or in the process. - boolean needBluetoothAudioStop = - (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING) - && (userSelectedAudioDevice != AudioDevice.NONE - && userSelectedAudioDevice != AudioDevice.BLUETOOTH); - - if (bluetoothManager.getState() == RTCBluetoothManager.State.HEADSET_AVAILABLE - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTING - || bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED) { - Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", " - + "stop=" + needBluetoothAudioStop + ", " - + "BT state=" + bluetoothManager.getState()); - } - - // Start or stop Bluetooth SCO connection given states set earlier. - if (needBluetoothAudioStop) { - bluetoothManager.stopScoAudio(); - bluetoothManager.updateDevice(); - } - - if (needBluetoothAudioStart && !needBluetoothAudioStop) { - // Attempt to start Bluetooth SCO audio (takes a few second to start). - if (!bluetoothManager.startScoAudio()) { - // Remove BLUETOOTH from list of available devices since SCO failed. - audioDevices.remove(AudioDevice.BLUETOOTH); - audioDeviceSetUpdated = true; - } - } - - // Update selected audio device. - final AudioDevice newAudioDevice; - - if (bluetoothManager.getState() == RTCBluetoothManager.State.SCO_CONNECTED) { - // If a Bluetooth is connected, then it should be used as output audio - // device. Note that it is not sufficient that a headset is available; - // an active SCO channel must also be up and running. 
- newAudioDevice = AudioDevice.BLUETOOTH; - } else if (hasWiredHeadset) { - // If a wired headset is connected, but Bluetooth is not, then wired headset is used as - // audio device. - newAudioDevice = AudioDevice.WIRED_HEADSET; - } else { - // No wired headset and no Bluetooth, hence the audio-device list can contain speaker - // phone (on a tablet), or speaker phone and earpiece (on mobile phone). - // |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE - // depending on the user's selection. - newAudioDevice = defaultAudioDevice; - } - // Switch to new device but only if there has been any changes. - if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) { - // Do the required device switch. - setAudioDeviceInternal(newAudioDevice); - Log.d(TAG, "New device status: " - + "available=" + audioDevices + ", " - + "selected=" + newAudioDevice); - if (audioManagerEvents != null) { - // Notify a listening client that audio device has been changed. - audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices); - } - } - Log.d(TAG, "--- updateAudioDeviceState done"); - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java deleted file mode 100644 index 0de1d6fc10..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCBluetoothManager.java +++ /dev/null @@ -1,552 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package com.cloudwebrtc.webrtc.utils; - -import android.Manifest; -import android.annotation.SuppressLint; -import android.bluetooth.BluetoothAdapter; -import android.bluetooth.BluetoothDevice; -import android.bluetooth.BluetoothHeadset; -import android.bluetooth.BluetoothProfile; -import android.content.BroadcastReceiver; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.content.pm.PackageManager; -import android.media.AudioManager; -import android.os.Build; -import android.os.Handler; -import android.os.Looper; -import android.os.Process; -import android.util.Log; - -import org.webrtc.ThreadUtils; - -import java.util.List; -import java.util.Set; - -/** - * RTCProximitySensor manages functions related to Bluetoth devices in the - * RTC demo. - */ -public class RTCBluetoothManager { - private static final String TAG = "RTCBluetoothManager"; - - // Timeout interval for starting or stopping audio to a Bluetooth SCO device. - private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000; - // Maximum number of SCO connection attempts. - private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2; - - // Bluetooth connection state. - public enum State { - // Bluetooth is not available; no adapter or Bluetooth is off. - UNINITIALIZED, - // Bluetooth error happened when trying to start Bluetooth. - ERROR, - // Bluetooth proxy object for the Headset profile exists, but no connected headset devices, - // SCO is not started or disconnected. 
- HEADSET_UNAVAILABLE, - // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset - // present, but SCO is not started or disconnected. - HEADSET_AVAILABLE, - // Bluetooth audio SCO connection with remote device is closing. - SCO_DISCONNECTING, - // Bluetooth audio SCO connection with remote device is initiated. - SCO_CONNECTING, - // Bluetooth audio SCO connection with remote device is established. - SCO_CONNECTED - } - - private final Context apprtcContext; - private final RTCAudioManager apprtcAudioManager; - - private final AudioManager audioManager; - private final Handler handler; - - int scoConnectionAttempts; - private State bluetoothState; - private final BluetoothProfile.ServiceListener bluetoothServiceListener; - - private BluetoothAdapter bluetoothAdapter; - - private BluetoothHeadset bluetoothHeadset; - - private BluetoothDevice bluetoothDevice; - private final BroadcastReceiver bluetoothHeadsetReceiver; - - // Runs when the Bluetooth timeout expires. We use that timeout after calling - // startScoAudio() or stopScoAudio() because we're not guaranteed to get a - // callback after those calls. - private final Runnable bluetoothTimeoutRunnable = new Runnable() { - @Override - public void run() { - bluetoothTimeout(); - } - }; - - /** - * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been - * connected to or disconnected from the service. - */ - private class BluetoothServiceListener implements BluetoothProfile.ServiceListener { - @Override - // Called to notify the client when the proxy object has been connected to the service. - // Once we have the profile proxy object, we can use it to monitor the state of the - // connection and perform other operations that are relevant to the headset profile. - public void onServiceConnected(int profile, BluetoothProfile proxy) { - if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) { - return; - } - Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState); - // Android only supports one connected Bluetooth Headset at a time. - bluetoothHeadset = (BluetoothHeadset) proxy; - updateAudioDeviceState(); - Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState); - } - - @Override - /** Notifies the client when the proxy object has been disconnected from the service. */ - public void onServiceDisconnected(int profile) { - if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) { - return; - } - Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState); - stopScoAudio(); - bluetoothHeadset = null; - bluetoothDevice = null; - bluetoothState = State.HEADSET_UNAVAILABLE; - updateAudioDeviceState(); - Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState); - } - } - - // Intent broadcast receiver which handles changes in Bluetooth device availability. - // Detects headset changes and Bluetooth SCO state changes. - private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver { - @Override - public void onReceive(Context context, Intent intent) { - if (bluetoothState == State.UNINITIALIZED) { - return; - } - final String action = intent.getAction(); - // Change in connection state of the Headset profile. Note that the - // change does not tell us anything about whether we're streaming - // audio to BT over SCO. Typically received when user turns on a BT - // headset while audio is active using another audio device. 
- if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) { - final int state = - intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED); - Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: " - + "a=ACTION_CONNECTION_STATE_CHANGED, " - + "s=" + stateToString(state) + ", " - + "sb=" + isInitialStickyBroadcast() + ", " - + "BT state: " + bluetoothState); - if (state == BluetoothHeadset.STATE_CONNECTED) { - scoConnectionAttempts = 0; - updateAudioDeviceState(); - } else if (state == BluetoothHeadset.STATE_CONNECTING) { - // No action needed. - } else if (state == BluetoothHeadset.STATE_DISCONNECTING) { - // No action needed. - } else if (state == BluetoothHeadset.STATE_DISCONNECTED) { - // Bluetooth is probably powered off during the call. - stopScoAudio(); - updateAudioDeviceState(); - } - // Change in the audio (SCO) connection state of the Headset profile. - // Typically received after call to startScoAudio() has finalized. - } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) { - final int state = intent.getIntExtra( - BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED); - Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: " - + "a=ACTION_AUDIO_STATE_CHANGED, " - + "s=" + stateToString(state) + ", " - + "sb=" + isInitialStickyBroadcast() + ", " - + "BT state: " + bluetoothState); - if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) { - cancelTimer(); - if (bluetoothState == State.SCO_CONNECTING) { - Log.d(TAG, "+++ Bluetooth audio SCO is now connected"); - bluetoothState = State.SCO_CONNECTED; - scoConnectionAttempts = 0; - updateAudioDeviceState(); - } else { - Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED"); - } - } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) { - Log.d(TAG, "+++ Bluetooth audio SCO is now connecting..."); - } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) { - Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected"); - if (isInitialStickyBroadcast()) { - Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast."); - return; - } - updateAudioDeviceState(); - } - } - Log.d(TAG, "onReceive done: BT state=" + bluetoothState); - } - } - - /** Construction. */ - static RTCBluetoothManager create(Context context, RTCAudioManager audioManager) { - Log.d(TAG, "create" + RTCUtils.getThreadInfo()); - return new RTCBluetoothManager(context, audioManager); - } - - protected RTCBluetoothManager(Context context, RTCAudioManager audioManager) { - Log.d(TAG, "ctor"); - ThreadUtils.checkIsOnMainThread(); - apprtcContext = context; - apprtcAudioManager = audioManager; - this.audioManager = getAudioManager(context); - bluetoothState = State.UNINITIALIZED; - bluetoothServiceListener = new BluetoothServiceListener(); - bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver(); - handler = new Handler(Looper.getMainLooper()); - } - - /** Returns the internal state. */ - public State getState() { - ThreadUtils.checkIsOnMainThread(); - return bluetoothState; - } - - /** - * Activates components required to detect Bluetooth devices and to enable - * BT SCO (audio is routed via BT SCO) for the headset profile. The end - * state will be HEADSET_UNAVAILABLE but a state machine has started which - * will start a state change sequence where the final outcome depends on - * if/when the BT headset is enabled. 
- * Example of state change sequence when start() is called while BT device - * is connected and enabled: - * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE --> - * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO. - * Note that the RTCAudioManager is also involved in driving this state - * change. - */ - public void start() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "start"); - - // BLUETOOTH permission is required for API levels below S - if ((Build.VERSION.SDK_INT < Build.VERSION_CODES.S - || apprtcContext.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.S) - && !hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) { - Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission"); - return; - } - - // BLUETOOTH_CONNECT permissions is required for API level S onwards - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S - && apprtcContext.getApplicationInfo().targetSdkVersion >= Build.VERSION_CODES.S - && !hasPermission(apprtcContext, Manifest.permission.BLUETOOTH_CONNECT)) { - Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH_CONNECT permission"); - return; - } - if (bluetoothState != State.UNINITIALIZED) { - Log.w(TAG, "Invalid BT state"); - return; - } - bluetoothHeadset = null; - bluetoothDevice = null; - scoConnectionAttempts = 0; - // Get a handle to the default local Bluetooth adapter. - bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); - if (bluetoothAdapter == null) { - Log.w(TAG, "Device does not support Bluetooth"); - return; - } - // Ensure that the device supports use of BT SCO audio for off call use cases. - if (!audioManager.isBluetoothScoAvailableOffCall()) { - Log.e(TAG, "Bluetooth SCO audio is not available off call"); - return; - } - logBluetoothAdapterInfo(bluetoothAdapter); - // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and - // Hands-Free) proxy object and install a listener. - if (!getBluetoothProfileProxy( - apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) { - Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed"); - return; - } - // Register receivers for BluetoothHeadset change notifications. - IntentFilter bluetoothHeadsetFilter = new IntentFilter(); - // Register receiver for change in connection state of the Headset profile. - bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED); - // Register receiver for change in audio connection state of the Headset profile. - bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED); - registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter); - Log.d(TAG, "HEADSET profile state: " - + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET))); - Log.d(TAG, "Bluetooth proxy for headset profile has started"); - bluetoothState = State.HEADSET_UNAVAILABLE; - Log.d(TAG, "start done: BT state=" + bluetoothState); - } - - /** Stops and closes all components related to Bluetooth audio. */ - public void stop() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "stop: BT state=" + bluetoothState); - if (bluetoothAdapter == null) { - return; - } - // Stop BT SCO connection with remote device if needed. - stopScoAudio(); - // Close down remaining BT resources. 
- if (bluetoothState == State.UNINITIALIZED) { - return; - } - unregisterReceiver(bluetoothHeadsetReceiver); - cancelTimer(); - if (bluetoothHeadset != null) { - bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset); - bluetoothHeadset = null; - } - bluetoothAdapter = null; - bluetoothDevice = null; - bluetoothState = State.UNINITIALIZED; - Log.d(TAG, "stop done: BT state=" + bluetoothState); - } - - /** - * Starts Bluetooth SCO connection with remote device. - * Note that the phone application always has the priority on the usage of the SCO connection - * for telephony. If this method is called while the phone is in call it will be ignored. - * Similarly, if a call is received or sent while an application is using the SCO connection, - * the connection will be lost for the application and NOT returned automatically when the call - * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a - * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO - * audio connection is established. - * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and - * higher. It might be required to initiates a virtual voice call since many devices do not - * accept SCO audio without a "call". - */ - public boolean startScoAudio() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "startSco: BT state=" + bluetoothState + ", " - + "attempts: " + scoConnectionAttempts + ", " - + "SCO is on: " + isScoOn()); - if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) { - Log.e(TAG, "BT SCO connection fails - no more attempts"); - return false; - } - List devices = bluetoothHeadset.getConnectedDevices(); - if (!devices.isEmpty()) { - bluetoothDevice = devices.get(0); - bluetoothState = State.HEADSET_AVAILABLE; - } - - if (bluetoothState != State.HEADSET_AVAILABLE) { - Log.e(TAG, "BT SCO connection fails - no headset available"); - return false; - } - // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED. - Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED..."); - // The SCO connection establishment can take several seconds, hence we cannot rely on the - // connection to be available when the method returns but instead register to receive the - // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED. - bluetoothState = State.SCO_CONNECTING; - audioManager.startBluetoothSco(); - audioManager.setBluetoothScoOn(true); - scoConnectionAttempts++; - startTimer(); - Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - return true; - } - - /** Stops Bluetooth SCO connection with remote device. */ - public void stopScoAudio() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) { - return; - } - cancelTimer(); - audioManager.stopBluetoothSco(); - audioManager.setBluetoothScoOn(false); - bluetoothState = State.SCO_DISCONNECTING; - Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", " - + "SCO is on: " + isScoOn()); - } - - /** - * Use the BluetoothHeadset proxy object (controls the Bluetooth Headset - * Service via IPC) to update the list of connected devices for the HEADSET - * profile. 
The internal state will change to HEADSET_UNAVAILABLE or to - * HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected - * device if available. - */ - public void updateDevice() { - if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) { - return; - } - Log.d(TAG, "updateDevice"); - // Get connected devices for the headset profile. Returns the set of - // devices which are in state STATE_CONNECTED. The BluetoothDevice class - // is just a thin wrapper for a Bluetooth hardware address. - List devices = bluetoothHeadset.getConnectedDevices(); - if (devices.isEmpty()) { - bluetoothDevice = null; - bluetoothState = State.HEADSET_UNAVAILABLE; - Log.d(TAG, "No connected bluetooth headset"); - } else { - // Always use first device in list. Android only supports one device. - bluetoothDevice = devices.get(0); - bluetoothState = State.HEADSET_AVAILABLE; - Log.d(TAG, "Connected bluetooth headset: " - + "name=" + bluetoothDevice.getName() + ", " - + "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice)) - + ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice)); - } - Log.d(TAG, "updateDevice done: BT state=" + bluetoothState); - } - - /** - * Stubs for test mocks. - */ - - protected AudioManager getAudioManager(Context context) { - return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); - } - - protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) { - apprtcContext.registerReceiver(receiver, filter); - } - - protected void unregisterReceiver(BroadcastReceiver receiver) { - apprtcContext.unregisterReceiver(receiver); - } - - protected boolean getBluetoothProfileProxy( - Context context, BluetoothProfile.ServiceListener listener, int profile) { - return bluetoothAdapter.getProfileProxy(context, listener, profile); - } - - protected boolean hasPermission(Context context, String permission) { - return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid()) - == PackageManager.PERMISSION_GRANTED; - } - - /** Logs the state of the local Bluetooth adapter. */ - @SuppressLint("HardwareIds") - protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) { - Log.d(TAG, "BluetoothAdapter: " - + "enabled=" + localAdapter.isEnabled() + ", " - + "state=" + stateToString(localAdapter.getState()) + ", " - + "name=" + localAdapter.getName() + ", " - + "address=" + localAdapter.getAddress()); - // Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter. - Set pairedDevices = localAdapter.getBondedDevices(); - if (!pairedDevices.isEmpty()) { - Log.d(TAG, "paired devices:"); - for (BluetoothDevice device : pairedDevices) { - Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress()); - } - } - } - - /** Ensures that the audio manager updates its list of available audio devices. */ - private void updateAudioDeviceState() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "updateAudioDeviceState"); - apprtcAudioManager.updateAudioDeviceState(); - } - - /** Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds. */ - private void startTimer() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "startTimer"); - handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS); - } - - /** Cancels any outstanding timer tasks. 
*/ - private void cancelTimer() { - ThreadUtils.checkIsOnMainThread(); - Log.d(TAG, "cancelTimer"); - handler.removeCallbacks(bluetoothTimeoutRunnable); - } - - /** - * Called when start of the BT SCO channel takes too long time. Usually - * happens when the BT device has been turned on during an ongoing call. - */ - private void bluetoothTimeout() { - ThreadUtils.checkIsOnMainThread(); - if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) { - return; - } - Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", " - + "attempts: " + scoConnectionAttempts + ", " - + "SCO is on: " + isScoOn()); - if (bluetoothState != State.SCO_CONNECTING) { - return; - } - // Bluetooth SCO should be connecting; check the latest result. - boolean scoConnected = false; - List devices = bluetoothHeadset.getConnectedDevices(); - if (devices.size() > 0) { - bluetoothDevice = devices.get(0); - if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) { - Log.d(TAG, "SCO connected with " + bluetoothDevice.getName()); - scoConnected = true; - } else { - Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName()); - } - } - if (scoConnected) { - // We thought BT had timed out, but it's actually on; updating state. - bluetoothState = State.SCO_CONNECTED; - scoConnectionAttempts = 0; - } else { - // Give up and "cancel" our request by calling stopBluetoothSco(). - Log.w(TAG, "BT failed to connect after timeout"); - stopScoAudio(); - } - updateAudioDeviceState(); - Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState); - } - - /** Checks whether audio uses Bluetooth SCO. */ - private boolean isScoOn() { - return audioManager.isBluetoothScoOn(); - } - - /** Converts BluetoothAdapter states into local string representations. */ - private String stateToString(int state) { - switch (state) { - case BluetoothAdapter.STATE_DISCONNECTED: - return "DISCONNECTED"; - case BluetoothAdapter.STATE_CONNECTED: - return "CONNECTED"; - case BluetoothAdapter.STATE_CONNECTING: - return "CONNECTING"; - case BluetoothAdapter.STATE_DISCONNECTING: - return "DISCONNECTING"; - case BluetoothAdapter.STATE_OFF: - return "OFF"; - case BluetoothAdapter.STATE_ON: - return "ON"; - case BluetoothAdapter.STATE_TURNING_OFF: - // Indicates the local Bluetooth adapter is turning off. Local clients should immediately - // attempt graceful disconnection of any remote links. - return "TURNING_OFF"; - case BluetoothAdapter.STATE_TURNING_ON: - // Indicates the local Bluetooth adapter is turning on. However local clients should wait - // for STATE_ON before attempting to use the adapter. - return "TURNING_ON"; - default: - return "INVALID"; - } - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java deleted file mode 100644 index 6cfa3bb4a4..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCProximitySensor.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -package com.cloudwebrtc.webrtc.utils; - -import android.content.Context; -import android.hardware.Sensor; -import android.hardware.SensorEvent; -import android.hardware.SensorEventListener; -import android.hardware.SensorManager; -import android.os.Build; -import android.util.Log; -import com.cloudwebrtc.webrtc.utils.RTCUtils; -import org.webrtc.ThreadUtils; - -/** - * RTCProximitySensor manages functions related to the proximity sensor in - * the RTC demo. - * On most device, the proximity sensor is implemented as a boolean-sensor. - * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX - * value i.e. the LUX value of the light sensor is compared with a threshold. - * A LUX-value more than the threshold means the proximity sensor returns "FAR". - * Anything less than the threshold value and the sensor returns "NEAR". - */ -public class RTCProximitySensor implements SensorEventListener { - private static final String TAG = "RTCProximitySensor"; - - // This class should be created, started and stopped on one thread - // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is - // the case. Only active when |DEBUG| is set to true. - private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); - - private final Runnable onSensorStateListener; - private final SensorManager sensorManager; - private Sensor proximitySensor; - private boolean lastStateReportIsNear; - - /** Construction */ - static RTCProximitySensor create(Context context, Runnable sensorStateListener) { - return new RTCProximitySensor(context, sensorStateListener); - } - - private RTCProximitySensor(Context context, Runnable sensorStateListener) { - Log.d(TAG, "RTCProximitySensor" + RTCUtils.getThreadInfo()); - onSensorStateListener = sensorStateListener; - sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE)); - } - - /** - * Activate the proximity sensor. Also do initialization if called for the - * first time. - */ - public boolean start() { - threadChecker.checkIsOnValidThread(); - Log.d(TAG, "start" + RTCUtils.getThreadInfo()); - if (!initDefaultSensor()) { - // Proximity sensor is not supported on this device. - return false; - } - sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL); - return true; - } - - /** Deactivate the proximity sensor. */ - public void stop() { - threadChecker.checkIsOnValidThread(); - Log.d(TAG, "stop" + RTCUtils.getThreadInfo()); - if (proximitySensor == null) { - return; - } - sensorManager.unregisterListener(this, proximitySensor); - } - - /** Getter for last reported state. Set to true if "near" is reported. */ - public boolean sensorReportsNearState() { - threadChecker.checkIsOnValidThread(); - return lastStateReportIsNear; - } - - @Override - public final void onAccuracyChanged(Sensor sensor, int accuracy) { - threadChecker.checkIsOnValidThread(); - RTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY); - if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) { - Log.e(TAG, "The values returned by this sensor cannot be trusted"); - } - } - - @Override - public final void onSensorChanged(SensorEvent event) { - threadChecker.checkIsOnValidThread(); - RTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY); - // As a best practice; do as little as possible within this method and - // avoid blocking. 
- float distanceInCentimeters = event.values[0]; - if (distanceInCentimeters < proximitySensor.getMaximumRange()) { - Log.d(TAG, "Proximity sensor => NEAR state"); - lastStateReportIsNear = true; - } else { - Log.d(TAG, "Proximity sensor => FAR state"); - lastStateReportIsNear = false; - } - - // Report about new state to listening client. Client can then call - // sensorReportsNearState() to query the current state (NEAR or FAR). - if (onSensorStateListener != null) { - onSensorStateListener.run(); - } - - Log.d(TAG, "onSensorChanged" + RTCUtils.getThreadInfo() + ": " - + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance=" - + event.values[0]); - } - - /** - * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7) - * does not support this type of sensor and false will be returned in such - * cases. - */ - private boolean initDefaultSensor() { - if (proximitySensor != null) { - return true; - } - proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY); - if (proximitySensor == null) { - return false; - } - logProximitySensorInfo(); - return true; - } - - /** Helper method for logging information about the proximity sensor. */ - private void logProximitySensorInfo() { - if (proximitySensor == null) { - return; - } - StringBuilder info = new StringBuilder("Proximity sensor: "); - info.append("name=").append(proximitySensor.getName()); - info.append(", vendor: ").append(proximitySensor.getVendor()); - info.append(", power: ").append(proximitySensor.getPower()); - info.append(", resolution: ").append(proximitySensor.getResolution()); - info.append(", max range: ").append(proximitySensor.getMaximumRange()); - info.append(", min delay: ").append(proximitySensor.getMinDelay()); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) { - // Added in API level 20. - info.append(", type: ").append(proximitySensor.getStringType()); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - // Added in API level 21. - info.append(", max delay: ").append(proximitySensor.getMaxDelay()); - info.append(", reporting mode: ").append(proximitySensor.getReportingMode()); - info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor()); - } - Log.d(TAG, info.toString()); - } -} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java b/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java deleted file mode 100644 index 1b55ebdf80..0000000000 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/RTCUtils.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package com.cloudwebrtc.webrtc.utils; - -import android.os.Build; -import android.util.Log; - -/** - * RTCUtils provides helper functions for managing thread safety. - */ -public final class RTCUtils { - private RTCUtils() {} - - /** Helper method which throws an exception when an assertion has failed. 
*/
-  public static void assertIsTrue(boolean condition) {
-    if (!condition) {
-      throw new AssertionError("Expected condition to be true");
-    }
-  }
-
-  /** Helper method for building a string of thread information.*/
-  public static String getThreadInfo() {
-    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
-        + "]";
-  }
-
-  /** Information about the current build, taken from system properties. */
-  public static void logDeviceInfo(String tag) {
-    Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
-        + "Release: " + Build.VERSION.RELEASE + ", "
-        + "Brand: " + Build.BRAND + ", "
-        + "Device: " + Build.DEVICE + ", "
-        + "Id: " + Build.ID + ", "
-        + "Hardware: " + Build.HARDWARE + ", "
-        + "Manufacturer: " + Build.MANUFACTURER + ", "
-        + "Model: " + Build.MODEL + ", "
-        + "Product: " + Build.PRODUCT);
-  }
-}
diff --git a/common/cpp/include/flutter_media_stream.h b/common/cpp/include/flutter_media_stream.h
index 0c28c055f9..0a73543824 100644
--- a/common/cpp/include/flutter_media_stream.h
+++ b/common/cpp/include/flutter_media_stream.h
@@ -9,7 +9,7 @@ using namespace flutter;
 class FlutterMediaStream {
  public:
-  FlutterMediaStream(FlutterWebRTCBase *base) : base_(base) {}
+  FlutterMediaStream(FlutterWebRTCBase *base);

   void GetUserMedia(const EncodableMap &constraints,
                     std::unique_ptr<MethodResult<EncodableValue>> result);
@@ -22,6 +22,12 @@ class FlutterMediaStream {

   void GetSources(std::unique_ptr<MethodResult<EncodableValue>> result);

+  void SelectAudioOutput(const std::string& device_id,
+                         std::unique_ptr<MethodResult<EncodableValue>> result);
+
+  void SelectAudioInput(const std::string& device_id,
+                        std::unique_ptr<MethodResult<EncodableValue>> result);
+
   void MediaStreamGetTracks(
       const std::string &stream_id,
       std::unique_ptr<MethodResult<EncodableValue>> result);
@@ -43,6 +49,8 @@ class FlutterMediaStream {

   void CreateLocalMediaStream(std::unique_ptr<MethodResult<EncodableValue>> result);

+  void OnDeviceChange();
+
  private:
   FlutterWebRTCBase *base_;
 };
diff --git a/common/cpp/include/flutter_screen_capture.h b/common/cpp/include/flutter_screen_capture.h
index 5d8461634e..3a50696837 100644
--- a/common/cpp/include/flutter_screen_capture.h
+++ b/common/cpp/include/flutter_screen_capture.h
@@ -47,8 +47,6 @@ class FlutterScreenCapture : public MediaListObserver, public DesktopCapturerObs
  private:
   FlutterWebRTCBase *base_;
-  std::unique_ptr<EventChannel<EncodableValue>> event_channel_;
-  std::unique_ptr<EventSink<EncodableValue>> event_sink_;
   std::map<DesktopType, scoped_refptr<RTCDesktopMediaList>> medialist_;
diff --git a/common/cpp/include/flutter_webrtc_base.h b/common/cpp/include/flutter_webrtc_base.h
index 69cc097a9f..047a19f42e 100644
--- a/common/cpp/include/flutter_webrtc_base.h
+++ b/common/cpp/include/flutter_webrtc_base.h
@@ -163,6 +163,8 @@ class FlutterWebRTCBase {

   void RemoveTracksForId(const std::string& id);

+  EventSink<EncodableValue> *event_sink();
+
  private:
   void ParseConstraints(const EncodableMap& src,
                         scoped_refptr<RTCMediaConstraints> mediaConstraints,
@@ -194,6 +196,8 @@ class FlutterWebRTCBase {
  protected:
   BinaryMessenger* messenger_;
   TextureRegistrar* textures_;
+  std::unique_ptr<EventChannel<EncodableValue>> event_channel_;
+  std::unique_ptr<EventSink<EncodableValue>> event_sink_;
 };

 }  // namespace flutter_webrtc_plugin
diff --git a/common/cpp/src/flutter_media_stream.cc b/common/cpp/src/flutter_media_stream.cc
index a5d75ed5ee..2aa662b227 100644
--- a/common/cpp/src/flutter_media_stream.cc
+++ b/common/cpp/src/flutter_media_stream.cc
@@ -6,6 +6,17 @@

 namespace flutter_webrtc_plugin {

+FlutterMediaStream::FlutterMediaStream(FlutterWebRTCBase* base)
+    : base_(base) {
+  base_->audio_device_->OnDeviceChange([&]{
+    if (base_->event_sink()) {
+      EncodableMap info;
+      info[EncodableValue("event")] = "onDeviceChange";
+      base_->event_sink()->Success(EncodableValue(info));
+    }
+  });
+}
+
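The constructor added above wires the audio device module's `OnDeviceChange` callback to the plugin's shared event sink, so hot-plugging an audio device now emits an `onDeviceChange` event to Dart. Below is a minimal sketch of how an app consumes it through `navigator.mediaDevices.ondevicechange`, the same pattern the example-app changes later in this diff use; the function name `watchAudioDevices` is hypothetical.

```dart
import 'package:flutter_webrtc/flutter_webrtc.dart';

// Re-enumerate whenever the native side reports 'onDeviceChange',
// e.g. a Bluetooth or USB audio device was plugged in or removed.
void watchAudioDevices() {
  navigator.mediaDevices.ondevicechange = (event) async {
    final devices = await navigator.mediaDevices.enumerateDevices();
    for (final d in devices.where((d) => d.kind == 'audiooutput')) {
      print('output: ${d.label} (${d.deviceId})');
    }
  };
}
```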
void FlutterMediaStream::GetUserMedia(
     const EncodableMap& constraints,
     std::unique_ptr<MethodResult<EncodableValue>> result) {
@@ -323,6 +334,48 @@ void FlutterMediaStream::GetSources(
   result->Success(EncodableValue(params));
 }

+void FlutterMediaStream::SelectAudioOutput(const std::string& device_id,
+    std::unique_ptr<MethodResult<EncodableValue>> result) {
+  char strPlayoutName[256];
+  char strPlayoutGuid[256];
+  int playout_devices = base_->audio_device_->PlayoutDevices();
+  bool found = false;
+  for (uint16_t i = 0; i < playout_devices; i++) {
+    base_->audio_device_->PlayoutDeviceName(i, strPlayoutName, strPlayoutGuid);
+    if (device_id != "" && device_id == strPlayoutGuid) {
+      base_->audio_device_->SetPlayoutDevice(i);
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    result->Error("Bad Arguments", "Device not found for id: " + device_id);
+    return;
+  }
+  result->Success();
+}
+
+void FlutterMediaStream::SelectAudioInput(const std::string& device_id,
+    std::unique_ptr<MethodResult<EncodableValue>> result) {
+  char strRecordingName[256];
+  char strRecordingGuid[256];
+  int recording_devices = base_->audio_device_->RecordingDevices();
+  bool found = false;
+  for (uint16_t i = 0; i < recording_devices; i++) {
+    base_->audio_device_->RecordingDeviceName(i, strRecordingName, strRecordingGuid);
+    if (device_id != "" && device_id == strRecordingGuid) {
+      base_->audio_device_->SetRecordingDevice(i);
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    result->Error("Bad Arguments", "Device not found for id: " + device_id);
+    return;
+  }
+  result->Success();
+}
+
 void FlutterMediaStream::MediaStreamGetTracks(
     const std::string& stream_id,
     std::unique_ptr<MethodResult<EncodableValue>> result) {
diff --git a/common/cpp/src/flutter_screen_capture.cc b/common/cpp/src/flutter_screen_capture.cc
index 7bb8dfb48e..d0d99d9267 100644
--- a/common/cpp/src/flutter_screen_capture.cc
+++ b/common/cpp/src/flutter_screen_capture.cc
@@ -4,24 +4,6 @@ namespace flutter_webrtc_plugin {

 FlutterScreenCapture::FlutterScreenCapture(FlutterWebRTCBase* base)
     : base_(base) {
-  std::string event_channel = "FlutterWebRTC/desktopSourcesEvent";
-  event_channel_.reset(new EventChannel<EncodableValue>(
-      base_->messenger_, event_channel, &StandardMethodCodec::GetInstance()));
-
-  auto handler = std::make_unique<StreamHandlerFunctions<EncodableValue>>(
-      [&](const flutter::EncodableValue* arguments,
-          std::unique_ptr<EventSink<EncodableValue>>&& events)
-          -> std::unique_ptr<StreamHandlerError<EncodableValue>> {
-        event_sink_ = std::move(events);
-        return nullptr;
-      },
-      [&](const flutter::EncodableValue* arguments)
-          -> std::unique_ptr<StreamHandlerError<EncodableValue>> {
-        event_sink_ = nullptr;
-        return nullptr;
-      });
-
-  event_channel_->SetStreamHandler(std::move(handler));
 }

 bool FlutterScreenCapture::BuildDesktopSourcesList(const EncodableList& types, bool force_reload) {
@@ -100,7 +82,7 @@ void FlutterScreenCapture::OnMediaSourceAdded(
   std::cout << " OnMediaSourceAdded: " << source->id().std_string()
             << std::endl;
-  if (event_sink_) {
+  if (base_->event_sink()) {
     EncodableMap info;
     info[EncodableValue("event")] = "desktopSourceAdded";
     info[EncodableValue("id")] = EncodableValue(source->id().std_string());
@@ -112,7 +94,7 @@
         {EncodableValue("width"), EncodableValue(0)},
         {EncodableValue("height"), EncodableValue(0)},
     };
-    event_sink_->Success(EncodableValue(info));
+    base_->event_sink()->Success(EncodableValue(info));
   }
 }

@@ -120,11 +102,11 @@ void FlutterScreenCapture::OnMediaSourceRemoved(
     scoped_refptr<MediaSource> source) {
   std::cout << " OnMediaSourceRemoved: " << source->id().std_string()
             << std::endl;
-  if (event_sink_) {
+  if (base_->event_sink()) {
     EncodableMap info;
     info[EncodableValue("event")] = "desktopSourceRemoved";
     info[EncodableValue("id")] = EncodableValue(source->id().std_string());
-    event_sink_->Success(EncodableValue(info));
+    base_->event_sink()->Success(EncodableValue(info));
   }
 }

@@ -132,12 +114,12 @@ void FlutterScreenCapture::OnMediaSourceNameChanged(
     scoped_refptr<MediaSource> source) {
   std::cout << " OnMediaSourceNameChanged: " << source->id().std_string()
             << std::endl;
-  if (event_sink_) {
+  if (base_->event_sink()) {
     EncodableMap info;
     info[EncodableValue("event")] = "desktopSourceNameChanged";
     info[EncodableValue("id")] = EncodableValue(source->id().std_string());
     info[EncodableValue("name")] = EncodableValue(source->name().std_string());
-    event_sink_->Success(EncodableValue(info));
+    base_->event_sink()->Success(EncodableValue(info));
   }
 }

@@ -145,13 +127,13 @@ void FlutterScreenCapture::OnMediaSourceThumbnailChanged(
     scoped_refptr<MediaSource> source) {
   std::cout << " OnMediaSourceThumbnailChanged: " << source->id().std_string()
             << std::endl;
-  if (event_sink_) {
+  if (base_->event_sink()) {
     EncodableMap info;
     info[EncodableValue("event")] = "desktopSourceThumbnailChanged";
     info[EncodableValue("id")] = EncodableValue(source->id().std_string());
     info[EncodableValue("thumbnail")] = EncodableValue(source->thumbnail().std_vector());
-    event_sink_->Success(EncodableValue(info));
+    base_->event_sink()->Success(EncodableValue(info));
   }
 }
diff --git a/common/cpp/src/flutter_webrtc.cc b/common/cpp/src/flutter_webrtc.cc
index 3186f58a37..ed41f9ed3a 100644
--- a/common/cpp/src/flutter_webrtc.cc
+++ b/common/cpp/src/flutter_webrtc.cc
@@ -102,6 +102,16 @@ void FlutterWebRTC::HandleMethodCall(
     }
   } else if (method_call.method_name().compare("getSources") == 0) {
     GetSources(std::move(result));
+  } else if (method_call.method_name().compare("selectAudioInput") == 0) {
+    const EncodableMap params =
+        GetValue<EncodableMap>(*method_call.arguments());
+    const std::string deviceId = findString(params, "deviceId");
+    SelectAudioInput(deviceId, std::move(result));
+  } else if (method_call.method_name().compare("selectAudioOutput") == 0) {
+    const EncodableMap params =
+        GetValue<EncodableMap>(*method_call.arguments());
+    const std::string deviceId = findString(params, "deviceId");
+    SelectAudioOutput(deviceId, std::move(result));
   } else if (method_call.method_name().compare("mediaStreamGetTracks") == 0) {
     if (!method_call.arguments()) {
       result->Error("Bad Arguments", "Null constraints arguments received");
diff --git a/common/cpp/src/flutter_webrtc_base.cc b/common/cpp/src/flutter_webrtc_base.cc
index 644394b7fd..3d49da5f7b 100644
--- a/common/cpp/src/flutter_webrtc_base.cc
+++ b/common/cpp/src/flutter_webrtc_base.cc
@@ -5,6 +5,8 @@

 namespace flutter_webrtc_plugin {

+const char *kEventChannelName = "FlutterWebRTC.Event";
+
 FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger *messenger,
                                      TextureRegistrar *textures)
     : messenger_(messenger), textures_(textures) {
@@ -13,12 +15,34 @@ FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger *messenger,
   audio_device_ = factory_->GetAudioDevice();
   video_device_ = factory_->GetVideoDevice();
   desktop_device_ = factory_->GetDesktopDevice();
+
+  event_channel_.reset(new EventChannel<EncodableValue>(
+      messenger_, kEventChannelName, &StandardMethodCodec::GetInstance()));
+
+  auto handler = std::make_unique<StreamHandlerFunctions<EncodableValue>>(
+      [&](const flutter::EncodableValue* arguments,
+          std::unique_ptr<EventSink<EncodableValue>>&& events)
+          -> std::unique_ptr<StreamHandlerError<EncodableValue>> {
+        event_sink_ = std::move(events);
+        return nullptr;
+      },
+      [&](const flutter::EncodableValue* arguments)
+          -> std::unique_ptr<StreamHandlerError<EncodableValue>> {
+        event_sink_ = nullptr;
+        return nullptr;
+      });
+
+  event_channel_->SetStreamHandler(std::move(handler));
 }
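With the constructor above, device-change and desktop-capturer notifications share one `FlutterWebRTC.Event` channel owned by `FlutterWebRTCBase` instead of per-feature channels. A sketch of what the Dart-side counterpart looks like, assuming only the channel name and the `{'event': ...}` payload shape used in this patch; `listenForDeviceChanges` is a hypothetical name, and the shipped implementation is the new `lib/src/native/event_channel.dart` further down in this diff.

```dart
import 'package:flutter/services.dart';

// One broadcast stream carries every native event; consumers key off
// the 'event' field of each map, mirroring event_channel.dart below.
final Stream<Map<String, dynamic>> webrtcEvents =
    const EventChannel('FlutterWebRTC.Event')
        .receiveBroadcastStream()
        .map((e) => Map<String, dynamic>.from(e as Map));

void listenForDeviceChanges() {
  webrtcEvents.listen((event) {
    if (event['event'] == 'onDeviceChange') {
      print('audio devices changed');
    }
  });
}
```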
FlutterWebRTCBase::~FlutterWebRTCBase() { LibWebRTC::Terminate(); } +EventSink *FlutterWebRTCBase::event_sink() { + return event_sink_? event_sink_.get() : nullptr; +} + std::string FlutterWebRTCBase::GenerateUUID() { return uuidxx::uuid::Generate().ToString(false); } diff --git a/common/darwin/Classes/AudioUtils.h b/common/darwin/Classes/AudioUtils.h index 552a34091c..f5aa256215 100644 --- a/common/darwin/Classes/AudioUtils.h +++ b/common/darwin/Classes/AudioUtils.h @@ -1,7 +1,10 @@ +#import + @interface AudioUtils : NSObject + (void)ensureAudioSessionWithRecording:(BOOL)recording; // needed for wired headphones to use headphone mic -+ (void)setPreferHeadphoneInput; ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type; ++ (void)setSpeakerphoneOn:(BOOL)enable; @end diff --git a/common/darwin/Classes/AudioUtils.m b/common/darwin/Classes/AudioUtils.m index d0ce2a84f1..f2b14e56e9 100644 --- a/common/darwin/Classes/AudioUtils.m +++ b/common/darwin/Classes/AudioUtils.m @@ -1,5 +1,4 @@ #import "AudioUtils.h" -#import #if TARGET_OS_IPHONE #import @@ -17,8 +16,7 @@ + (void)ensureAudioSessionWithRecording:(BOOL)recording { if (recording && session.category != AVAudioSessionCategoryPlayAndRecord && session.category != AVAudioSessionCategoryMultiRoute) { config.category = AVAudioSessionCategoryPlayAndRecord; - config.categoryOptions = AVAudioSessionCategoryOptionDefaultToSpeaker | - AVAudioSessionCategoryOptionAllowBluetooth | + config.categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionAllowBluetoothA2DP; [session lockForConfiguration]; @@ -43,20 +41,41 @@ + (void)ensureAudioSessionWithRecording:(BOOL)recording { #endif } -+ (void)setPreferHeadphoneInput { ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type { #if TARGET_OS_IPHONE - AVAudioSession *session = [AVAudioSession sharedInstance]; + RTCAudioSession *rtcSession = [RTCAudioSession sharedInstance]; AVAudioSessionPortDescription *inputPort = nil; - for (AVAudioSessionPortDescription *port in session.availableInputs) { - if ([port.portType isEqualToString:AVAudioSessionPortHeadphones]) { + for (AVAudioSessionPortDescription *port in rtcSession.session.availableInputs) { + if ([port.portType isEqualToString:type]) { inputPort = port; break; } } if (inputPort != nil) { - [session setPreferredInput:inputPort error:nil]; + NSError *errOut = nil; + [rtcSession lockForConfiguration]; + [rtcSession setPreferredInput:inputPort error:&errOut]; + [rtcSession unlockForConfiguration]; + if(errOut != nil) { + return NO; + } + return YES; } #endif + return NO; } -@end \ No newline at end of file ++ (void)setSpeakerphoneOn:(BOOL)enable { +#if TARGET_OS_IPHONE + RTCAudioSession *session = [RTCAudioSession sharedInstance]; + [session lockForConfiguration]; + [session setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:enable ? 
AVAudioSessionCategoryOptionDefaultToSpeaker + : + AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionAllowBluetoothA2DP + error:nil]; + [session setActive:YES error:nil]; + [session unlockForConfiguration]; +#endif +} +@end diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.h b/common/darwin/Classes/FlutterRTCDesktopCapturer.h index eb7f60037b..424562649c 100644 --- a/common/darwin/Classes/FlutterRTCDesktopCapturer.h +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.h @@ -22,7 +22,4 @@ -(void)getDesktopSourceThumbnail:(nonnull NSDictionary *)argsMap result:(nonnull FlutterResult)result; -#if TARGET_OS_OSX --(void)enableDesktopCapturerEventChannel:(nonnull NSObject *)messenger; -#endif @end \ No newline at end of file diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.m b/common/darwin/Classes/FlutterRTCDesktopCapturer.m index 02dc9be512..3772eac1bc 100644 --- a/common/darwin/Classes/FlutterRTCDesktopCapturer.m +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.m @@ -12,8 +12,6 @@ RTCDesktopMediaList *_screen = nil; RTCDesktopMediaList *_window = nil; NSArray* _captureSources; -FlutterEventSink _eventSink = nil; -FlutterEventChannel* _eventChannel = nil; #endif @implementation FlutterWebRTCPlugin (DesktopCapturer) @@ -328,43 +326,19 @@ - (BOOL)buildDesktopSourcesListWithTypes:(NSArray *)types forceReload:(BOOL)forc return YES; } --(void) enableDesktopCapturerEventChannel:(nonnull NSObject *)messenger { - if(_eventChannel == nil) { - _eventChannel = [FlutterEventChannel - eventChannelWithName:@"FlutterWebRTC/desktopSourcesEvent" - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } -} - -#pragma mark - FlutterStreamHandler methods - -#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" -- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { - _eventSink = nil; - return nil; -} - -#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" -- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments - eventSink:(nonnull FlutterEventSink)sink { - _eventSink = sink; - return nil; -} - #pragma mark - RTCDesktopMediaListDelegate delegate #pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" - (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { //NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId); - if(_eventSink) { + if(self.eventSink) { NSImage *image = [source UpdateThumbnail]; NSData *data = [[NSData alloc] init]; if(image != nil) { NSImage *resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; data = [resizedImg TIFFRepresentation]; } - _eventSink(@{ + self.eventSink(@{ @"event": @"desktopSourceAdded", @"id": source.sourceId, @"name": source.name, @@ -378,8 +352,8 @@ - (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { #pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" - (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *) source { //NSLog(@"didDesktopSourceRemoved: %@, id %@", source.name, source.sourceId); - if(_eventSink) { - _eventSink(@{ + if(self.eventSink) { + self.eventSink(@{ @"event": @"desktopSourceRemoved", @"id": source.sourceId, }); @@ -389,8 +363,8 @@ - (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *) source { #pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" - (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source { 
//NSLog(@"didDesktopSourceNameChanged: %@, id %@", source.name, source.sourceId); - if(_eventSink) { - _eventSink(@{ + if(self.eventSink) { + self.eventSink(@{ @"event": @"desktopSourceNameChanged", @"id": source.sourceId, @"name": source.name, @@ -401,10 +375,10 @@ - (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source { #pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" - (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source { //NSLog(@"didDesktopSourceThumbnailChanged: %@, id %@", source.name, source.sourceId); - if(_eventSink) { + if(self.eventSink) { NSImage *resizedImg = [self resizeImage:[source thumbnail] forSize:NSMakeSize(320, 180)]; NSData *data = [resizedImg TIFFRepresentation]; - _eventSink(@{ + self.eventSink(@{ @"event": @"desktopSourceThumbnailChanged", @"id": source.sourceId, @"thumbnail": data diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h index bdcaa2e6b9..316023d90b 100644 --- a/common/darwin/Classes/FlutterRTCMediaStream.h +++ b/common/darwin/Classes/FlutterRTCMediaStream.h @@ -23,4 +23,10 @@ -(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track toPath:(NSString *) path result:(FlutterResult) result; + +-(void)selectAudioInput:(NSString *)deviceId + result:(FlutterResult) result; + +-(void)selectAudioOutput:(NSString *)deviceId + result:(FlutterResult) result; @end diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m index 14d7a08c8a..c7c92a65a9 100755 --- a/common/darwin/Classes/FlutterRTCMediaStream.m +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -494,28 +494,143 @@ -(void)createLocalMediaStream:(FlutterResult)result{ } -(void)getSources:(FlutterResult)result{ - NSMutableArray *sources = [NSMutableArray array]; - NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; - for (AVCaptureDevice *device in videoDevices) { - [sources addObject:@{ - @"facing": device.positionString, - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"videoinput", - }]; - } - NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; - for (AVCaptureDevice *device in audioDevices) { - [sources addObject:@{ - @"facing": @"", - @"deviceId": device.uniqueID, - @"label": device.localizedName, - @"kind": @"audioinput", - }]; - } + NSMutableArray *sources = [NSMutableArray array]; + NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in videoDevices) { + [sources addObject:@{ + @"facing": device.positionString, + @"deviceId": device.uniqueID, + @"label": device.localizedName, + @"kind": @"videoinput", + }]; + } +#if TARGET_OS_IPHONE + RTCAudioSession *session = [RTCAudioSession sharedInstance]; + NSError *setCategoryError = nil; + [session.session setCategory:AVAudioSessionCategoryPlayAndRecord mode:AVAudioSessionModeVideoChat options:AVAudioSessionCategoryOptionAllowBluetooth error:&setCategoryError]; + [session setActive:YES error:&setCategoryError]; + for (AVAudioSessionPortDescription *port in session.session.availableInputs) { + //NSLog(@"input portName: %@, type %@", port.portName,port.portType); + [sources addObject:@{ + @"facing": @"", + @"deviceId": port.UID, + @"label": port.portName, + @"kind": @"audioinput", + }]; + } + for (AVAudioSessionPortDescription *port in session.currentRoute.outputs) { + //NSLog(@"output portName: %@, type %@", 
port.portName,port.portType);
+    if(session.currentRoute.outputs.count == 1 && ![port.UID isEqualToString:@"Speaker"]) {
+      [sources addObject:@{
+        @"facing": @"",
+        @"deviceId": @"Speaker",
+        @"label": @"Speaker",
+        @"kind": @"audiooutput",
+      }];
+    }
+    [sources addObject:@{
+      @"facing": @"",
+      @"deviceId": port.UID,
+      @"label": port.portName,
+      @"kind": @"audiooutput",
+    }];
+  }
+#endif
+#if TARGET_OS_OSX
+  RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule];
+
+  NSArray *inputDevices = [audioDeviceModule inputDevices];
+  for (RTCAudioDevice *device in inputDevices) {
+    [sources addObject:@{
+      @"facing": @"",
+      @"deviceId": device.deviceId,
+      @"label": device.name,
+      @"kind": @"audioinput",
+    }];
+  }
+
+  NSArray *outputDevices = [audioDeviceModule outputDevices];
+  for (RTCAudioDevice *device in outputDevices) {
+    [sources addObject:@{
+      @"facing": @"",
+      @"deviceId": device.deviceId,
+      @"label": device.name,
+      @"kind": @"audiooutput",
+    }];
+  }
+#endif
   result(@{@"sources": sources});
 }

+-(void)selectAudioInput:(NSString *)deviceId
+                 result:(FlutterResult) result {
+#if TARGET_OS_OSX
+  RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule];
+  NSArray *inputDevices = [audioDeviceModule inputDevices];
+  for (RTCAudioDevice *device in inputDevices) {
+    if([deviceId isEqualToString:device.deviceId]){
+      [audioDeviceModule setInputDevice:device];
+      result(nil);
+      return;
+    }
+  }
+#endif
+#if TARGET_OS_IPHONE
+  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  for (AVAudioSessionPortDescription *port in session.session.availableInputs) {
+    if([port.UID isEqualToString:deviceId]) {
+      if(![self.preferredInput isEqualToString:port.portType]) {
+        self.preferredInput = port.portType;
+        [AudioUtils selectAudioInput:self.preferredInput];
+      }
+      break;
+    }
+  }
+  result(nil);
+  return;
+#endif
+  result([FlutterError errorWithCode:@"selectAudioInputFailed"
+                             message:[NSString stringWithFormat:@"Error: deviceId not found!"]
+                             details:nil]);
+}
+
+-(void)selectAudioOutput:(NSString *)deviceId
+                  result:(FlutterResult) result {
+#if TARGET_OS_OSX
+  RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule];
+  NSArray *outputDevices = [audioDeviceModule outputDevices];
+  for (RTCAudioDevice *device in outputDevices) {
+    if([deviceId isEqualToString:device.deviceId]){
+      [audioDeviceModule setOutputDevice:device];
+      result(nil);
+      return;
+    }
+  }
+#endif
+#if TARGET_OS_IPHONE
+  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  NSError *setCategoryError = nil;
+
+  if([deviceId isEqualToString:@"Speaker"]) {
+    [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker error:&setCategoryError];
+  } else {
+    [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None error:&setCategoryError];
+  }
+
+  if(setCategoryError == nil) {
+    result(nil);
+    return;
+  }
+
+  result([FlutterError errorWithCode:@"selectAudioOutputFailed"
+                             message:[NSString stringWithFormat:@"Error: %@", [setCategoryError localizedFailureReason]]
+                             details:nil]);
+  return;
+#endif
+  result([FlutterError errorWithCode:@"selectAudioOutputFailed"
+                             message:[NSString stringWithFormat:@"Error: deviceId not found!"]
+                             details:nil]);
+}
+
 -(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track { // what's different to mediaStreamTrackStop? only call mediaStream explicitly?
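On macOS, `selectAudioInput`/`selectAudioOutput` above go through the `RTCAudioDeviceModule`, while on iOS they translate into `AVAudioSession` route changes (with `Speaker` as a synthetic device id). From Dart, both are reached via the `Helper` methods added later in this diff. A usage sketch; `routeAudio` and the first-device picks are illustrative only:

```dart
import 'package:flutter_webrtc/flutter_webrtc.dart';

Future<void> routeAudio() async {
  final devices = await navigator.mediaDevices.enumerateDevices();

  // Naively pick the first reported microphone and output; a real app
  // would let the user choose (see the PopupMenuButtons in the example).
  final mic = devices.firstWhere((d) => d.kind == 'audioinput');
  final out = devices.firstWhere((d) => d.kind == 'audiooutput');

  await Helper.selectAudioInput(mic.deviceId);
  // Native platforms only; Flutter Web uses RTCVideoRenderer.audioOutput.
  await Helper.selectAudioOutput(out.deviceId);
}
```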
diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h index 32310a82b4..1c56560f78 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.h +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -14,10 +14,9 @@ typedef void (^CompletionHandler)(void); typedef void (^CapturerStopHandler)(CompletionHandler handler); -@interface FlutterWebRTCPlugin : NSObject @@ -32,9 +31,11 @@ RTCPeerConnectionDelegate @property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */ #endif +@property (nonatomic, strong) FlutterEventSink eventSink; @property (nonatomic, strong) NSObject* messenger; @property (nonatomic, strong) RTCCameraVideoCapturer *videoCapturer; @property (nonatomic, strong) FlutterRTCFrameCapturer *frameCapturer; +@property (nonatomic, strong) AVAudioSessionPort preferredInput; @property (nonatomic) BOOL _usingFrontCamera; @property (nonatomic) int _targetWidth; @property (nonatomic) int _targetHeight; diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m index d0d4400f12..e8483f173d 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.m +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -13,17 +13,20 @@ #pragma clang diagnostic ignored "-Wprotocol" @implementation FlutterWebRTCPlugin { - #pragma clang diagnostic pop - FlutterMethodChannel *_methodChannel; + FlutterEventSink _eventSink; + FlutterEventChannel* _eventChannel; id _registry; id _messenger; id _textures; BOOL _speakerOn; + AVAudioSessionPort _preferredInput; } @synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +@synthesize preferredInput = _preferredInput; + (void)registerWithRegistrar:(NSObject*)registrar { @@ -53,12 +56,17 @@ - (instancetype)initWithChannel:(FlutterMethodChannel *)channel self = [super init]; + FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"FlutterWebRTC.Event" + binaryMessenger:messenger]; + [eventChannel setStreamHandler:self]; + if (self) { _methodChannel = channel; _registry = registrar; _textures = textures; _messenger = messenger; _speakerOn = NO; + _eventChannel = eventChannel; #if TARGET_OS_IPHONE self.viewController = viewController; #endif @@ -81,35 +89,59 @@ - (instancetype)initWithChannel:(FlutterMethodChannel *)channel self.renders = [NSMutableDictionary new]; self.videoCapturerStopHandlers = [NSMutableDictionary new]; #if TARGET_OS_IPHONE + _preferredInput = AVAudioSessionPortHeadphones; + _speakerOn = NO; + [AudioUtils setSpeakerphoneOn:_speakerOn]; AVAudioSession *session = [AVAudioSession sharedInstance]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:session]; #endif #if TARGET_OS_OSX - [self enableDesktopCapturerEventChannel:_messenger]; + [_peerConnectionFactory.audioDeviceModule setDevicesUpdatedHandler:^(void) { + NSLog(@"Handle Devices Updated!"); + if(self.eventSink) { + self.eventSink(@{@"event" : @"onDeviceChange"}); + } + }]; #endif return self; } +#pragma mark - FlutterStreamHandler methods + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + _eventSink = sink; + return 
nil; +} - (void)didSessionRouteChange:(NSNotification *)notification { #if TARGET_OS_IPHONE NSDictionary *interuptionDict = notification.userInfo; NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; - + AVAudioSession* session = [AVAudioSession sharedInstance]; switch (routeChangeReason) { case AVAudioSessionRouteChangeReasonCategoryChange: { NSError* error; - [[AVAudioSession sharedInstance] overrideOutputAudioPort:_speakerOn? AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; + [session overrideOutputAudioPort:_speakerOn? AVAudioSessionPortOverrideSpeaker : AVAudioSessionPortOverrideNone error:&error]; break; } case AVAudioSessionRouteChangeReasonNewDeviceAvailable: { - [AudioUtils setPreferHeadphoneInput]; + [AudioUtils selectAudioInput:_preferredInput]; break; } - default: break; } + if(self.eventSink && AVAudioSessionRouteChangeReasonOverride != routeChangeReason) { + self.eventSink(@{@"event" : @"onDeviceChange"}); + } #endif } @@ -152,6 +184,14 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result [self createLocalMediaStream:result]; } else if ([@"getSources" isEqualToString:call.method]) { [self getSources:result]; + } else if([@"selectAudioInput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioInput:deviceId result:result]; + } else if([@"selectAudioOutput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioOutput:deviceId result:result]; } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSString* streamId = argsMap[@"streamId"]; @@ -600,23 +640,17 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result audioTrack.isEnabled = !mute.boolValue; } result(nil); - } else if ([@"enableSpeakerphone" isEqualToString:call.method]) { + } #if TARGET_OS_IPHONE + else if ([@"enableSpeakerphone" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSNumber* enable = argsMap[@"enable"]; _speakerOn = enable.boolValue; - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - withOptions:_speakerOn ? 
AVAudioSessionCategoryOptionDefaultToSpeaker - : - AVAudioSessionCategoryOptionAllowBluetooth|AVAudioSessionCategoryOptionAllowBluetoothA2DP - error:nil]; - [audioSession setActive:YES error:nil]; + [AudioUtils setSpeakerphoneOn:_speakerOn]; result(nil); -#else - result(FlutterMethodNotImplemented); + } #endif - } else if ([@"getLocalDescription" isEqualToString:call.method]) { + else if ([@"getLocalDescription" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSString* peerConnectionId = argsMap[@"peerConnectionId"]; RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; @@ -1052,7 +1086,7 @@ -(void)mediaStreamGetTracks:(NSString*)streamId } result(@{@"audioTracks": audioTracks, @"videoTracks" : videoTracks }); - }else{ + } else { result(nil); } } diff --git a/example/lib/main.dart b/example/lib/main.dart index c2c7619c3e..409772611a 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -65,6 +65,7 @@ class _MyAppState extends State { @override Widget build(BuildContext context) { return MaterialApp( + debugShowCheckedModeBanner: false, home: Scaffold( appBar: AppBar( title: Text('Flutter-WebRTC example'), diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index 8219ac0c45..9cc0296bae 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -30,6 +30,10 @@ class _GetUserMediaSampleState extends State { void initState() { super.initState(); initRenderers(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + _mediaDevicesList = await navigator.mediaDevices.enumerateDevices(); + }; } @override @@ -39,6 +43,7 @@ class _GetUserMediaSampleState extends State { _hangUp(); } _localRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; } void initRenderers() async { diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart index b38e8efdbf..c8479ac7a6 100644 --- a/example/lib/src/loopback_sample_unified_tracks.dart +++ b/example/lib/src/loopback_sample_unified_tracks.dart @@ -21,10 +21,11 @@ class _MyAppState extends State { bool _inCalling = false; bool _micOn = false; bool _cameraOn = false; - + bool _speakerOn = false; + List? 
_mediaDevicesList; final _configuration = { 'iceServers': [ - {'url': 'stun:stun.l.google.com:19302'}, + //{'url': 'stun:stun.l.google.com:19302'}, ], 'sdpSemantics': 'unified-plan' }; @@ -42,14 +43,38 @@ class _MyAppState extends State { super.initState(); initRenderers(); initLocalConnection(); + _refreshMediaDevices(); + navigator.mediaDevices.ondevicechange = (event) async { + print('++++++ ondevicechange ++++++'); + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + }; } @override void deactivate() { super.deactivate(); + navigator.mediaDevices.ondevicechange = null; _cleanUp(); } + Future _refreshMediaDevices() async { + var devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _mediaDevicesList = devices; + }); + } + + void _selectAudioOutput(String deviceId) async { + await _localRenderer.audioOutput(deviceId); + } + + void _selectAudioInput(String deviceId) async { + await Helper.selectAudioInput(deviceId); + } + void _cleanUp() async { try { await _localStream?.dispose(); @@ -221,8 +246,8 @@ class _MyAppState extends State { } catch (e) { print(e.toString()); } - if (!mounted) return; + if (!mounted) return; setState(() { _inCalling = true; }); @@ -270,8 +295,7 @@ class _MyAppState extends State { 'video': video ? { 'mandatory': { - 'minWidth': - '640', // Provide your own width, height and frame rate here + 'minWidth': '640', 'minHeight': '480', 'minFrameRate': '30', }, @@ -318,6 +342,7 @@ class _MyAppState extends State { void _startAudio() async { var newStream = await navigator.mediaDevices .getUserMedia(_getMediaConstraints(audio: true, video: false)); + if (_localStream != null) { await _removeExistingAudioTrack(); for (var newTrack in newStream.getAudioTracks()) { @@ -343,6 +368,13 @@ class _MyAppState extends State { }); } + void _switchSpeaker() async { + setState(() { + _speakerOn = !_speakerOn; + Helper.setSpeakerphoneOn(_speakerOn); + }); + } + Future _removeExistingVideoTrack({bool fromConnection = false}) async { var tracks = _localStream!.getVideoTracks(); for (var i = tracks.length - 1; i >= 0; i--) { @@ -437,6 +469,42 @@ class _MyAppState extends State { return Scaffold( appBar: AppBar( title: Text('LoopBack Unified Tracks example'), + actions: [ + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! + .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + if (_mediaDevicesList != null) { + return _mediaDevicesList! + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + } + return []; + }, + ), + ], ), body: OrientationBuilder( builder: (context, orientation) { @@ -463,6 +531,15 @@ class _MyAppState extends State { tooltip: _micOn ? 'Stop mic' : 'Start mic', onPressed: _micOn ? _stopAudio : _startAudio, child: Icon(_micOn ? Icons.mic : Icons.mic_off)), + FloatingActionButton( + heroTag: null, + backgroundColor: + _speakerOn ? null : Theme.of(context).disabledColor, + tooltip: _speakerOn ? 
'Stop speaker' : 'Start speaker', + onPressed: _switchSpeaker, + child: Icon(_speakerOn + ? Icons.speaker_phone + : Icons.phone_in_talk)), FloatingActionButton( heroTag: null, backgroundColor: diff --git a/lib/src/helper.dart b/lib/src/helper.dart index 468f6d69a9..ed6f8a55bd 100644 --- a/lib/src/helper.dart +++ b/lib/src/helper.dart @@ -25,6 +25,29 @@ class Helper { static Future> get audiooutputs => enumerateDevices('audiooutput'); + /// Used to select a specific audio output device. + /// + /// Note: This method is only used for flutter native, + /// flutter web can use RTCVideoRenderer.audioOutput instead + static Future selectAudioOutput(String deviceId) async { + await mediaDevices + .selectAudioOutput(AudioOutputOptions(deviceId: deviceId)); + } + + static Future selectAudioInput(String deviceId) async { + await WebRTC.invokeMethod( + 'selectAudioInput', + {'deviceId': deviceId}, + ); + } + + static Future setSpeakerphoneOn(bool enable) async { + await WebRTC.invokeMethod( + 'enableSpeakerphone', + {'enable': enable}, + ); + } + /// To select a a specific camera, you need to set constraints /// eg. /// constraints = { diff --git a/lib/src/native/desktop_capturer_impl.dart b/lib/src/native/desktop_capturer_impl.dart index 44ef402852..4e3c21cb96 100644 --- a/lib/src/native/desktop_capturer_impl.dart +++ b/lib/src/native/desktop_capturer_impl.dart @@ -1,7 +1,7 @@ import 'dart:async'; import 'dart:typed_data'; -import 'package:flutter/services.dart'; +import 'package:flutter_webrtc/src/native/event_channel.dart'; import '../desktop_capturer.dart'; import 'utils.dart'; @@ -63,19 +63,19 @@ class DesktopCapturerSourceNative extends DesktopCapturerSource { class DesktopCapturerNative extends DesktopCapturer { DesktopCapturerNative._internal() { - EventChannel('FlutterWebRTC/desktopSourcesEvent') - .receiveBroadcastStream() - .listen(eventListener, onError: errorListener); + FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data[event]; + handleEvent(event, map); + }); } - static final DesktopCapturerNative instance = DesktopCapturerNative._internal(); final Map _sources = {}; - void eventListener(dynamic event) async { - final Map map = event; - switch (map['event']) { + void handleEvent(String event, Map map) async { + switch (event) { case 'desktopSourceAdded': final source = DesktopCapturerSourceNative.fromMap(map); if (_sources[source.id] == null) { diff --git a/lib/src/native/event_channel.dart b/lib/src/native/event_channel.dart new file mode 100644 index 0000000000..de43755a0a --- /dev/null +++ b/lib/src/native/event_channel.dart @@ -0,0 +1,28 @@ +import 'dart:async'; + +import 'package:flutter/services.dart'; + +class FlutterWebRTCEventChannel { + FlutterWebRTCEventChannel._internal() { + EventChannel('FlutterWebRTC.Event') + .receiveBroadcastStream() + .listen(eventListener, onError: errorListener); + } + + static final FlutterWebRTCEventChannel instance = + FlutterWebRTCEventChannel._internal(); + + final StreamController> handleEvents = + StreamController.broadcast(); + + void eventListener(dynamic event) async { + final Map map = event; + handleEvents.add({map['event'] as String: map}); + } + + void errorListener(Object obj) { + if (obj is Exception) { + throw obj; + } + } +} diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart index af118aa622..e9017a5c1e 100644 --- a/lib/src/native/factory_impl.dart +++ b/lib/src/native/factory_impl.dart @@ -1,5 +1,6 @@ import 'dart:async'; +import 
'package:flutter_webrtc/src/native/mediadevices_impl.dart'; import 'package:webrtc_interface/webrtc_interface.dart'; import '../desktop_capturer.dart'; @@ -59,7 +60,7 @@ class RTCFactoryNative extends RTCFactory { } @override - Navigator get navigator => NavigatorNative(); + Navigator get navigator => NavigatorNative.instance; } Future createPeerConnection( @@ -80,3 +81,5 @@ MediaRecorder mediaRecorder() { Navigator get navigator => RTCFactoryNative.instance.navigator; DesktopCapturer get desktopCapturer => DesktopCapturerNative.instance; + +MediaDevices get mediaDevices => MediaDeviceNative.instance; diff --git a/lib/src/native/media_stream_track_impl.dart b/lib/src/native/media_stream_track_impl.dart index a7ebddf6eb..f86f2e5fbc 100644 --- a/lib/src/native/media_stream_track_impl.dart +++ b/lib/src/native/media_stream_track_impl.dart @@ -64,13 +64,10 @@ class MediaStreamTrackNative extends MediaStreamTrack { @override Future switchCamera() => Helper.switchCamera(this); + @Deprecated('Use Helper.setSpeakerphoneOn instead') @override void enableSpeakerphone(bool enable) async { - print('MediaStreamTrack:enableSpeakerphone $enable'); - await WebRTC.invokeMethod( - 'enableSpeakerphone', - {'trackId': _trackId, 'enable': enable}, - ); + return Helper.setSpeakerphoneOn(enable); } @override diff --git a/lib/src/native/mediadevices_impl.dart b/lib/src/native/mediadevices_impl.dart index fe4c0654ca..5f988ad4ef 100644 --- a/lib/src/native/mediadevices_impl.dart +++ b/lib/src/native/mediadevices_impl.dart @@ -4,10 +4,29 @@ import 'package:flutter/services.dart'; import 'package:webrtc_interface/webrtc_interface.dart'; +import 'event_channel.dart'; import 'media_stream_impl.dart'; import 'utils.dart'; class MediaDeviceNative extends MediaDevices { + MediaDeviceNative._internal() { + FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) { + var event = data.keys.first; + Map map = data.values.first; + handleEvent(event, map); + }); + } + + static final MediaDeviceNative instance = MediaDeviceNative._internal(); + + void handleEvent(String event, final Map map) async { + switch (map['event']) { + case 'onDeviceChange': + ondevicechange?.call(null); + break; + } + } + @override Future getUserMedia( Map mediaConstraints) async { @@ -79,4 +98,14 @@ class MediaDeviceNative extends MediaDevices { ) .toList(); } + + @override + Future selectAudioOutput( + [AudioOutputOptions? 
options]) async {
+    await WebRTC.invokeMethod('selectAudioOutput', {
+      'deviceId': options?.deviceId,
+    });
+    // TODO: return the selected device
+    return MediaDeviceInfo(label: 'label', deviceId: options!.deviceId);
+  }
 }
diff --git a/lib/src/native/navigator_impl.dart b/lib/src/native/navigator_impl.dart
index 4d6b111986..cafe6d7dce 100644
--- a/lib/src/native/navigator_impl.dart
+++ b/lib/src/native/navigator_impl.dart
@@ -3,6 +3,10 @@ import 'package:webrtc_interface/webrtc_interface.dart';
 import 'mediadevices_impl.dart';

 class NavigatorNative extends Navigator {
+  NavigatorNative._internal();
+
+  static final NavigatorNative instance = NavigatorNative._internal();
+
   @override
   Future<MediaStream> getDisplayMedia(Map<String, dynamic> mediaConstraints) {
     return mediaDevices.getDisplayMedia(mediaConstraints);
@@ -19,5 +23,5 @@
   }

   @override
-  MediaDevices get mediaDevices => MediaDeviceNative();
+  MediaDevices get mediaDevices => MediaDeviceNative.instance;
 }
diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart
index d91c1c7c0c..99176e3af2 100644
--- a/lib/src/native/rtc_video_renderer_impl.dart
+++ b/lib/src/native/rtc_video_renderer_impl.dart
@@ -116,8 +116,13 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
   }

   @override
-  Future<bool> audioOutput(String deviceId) {
-    // TODO(cloudwebrtc): related to https://github.com/flutter-webrtc/flutter-webrtc/issues/395
-    throw UnimplementedError('This is not implement yet');
+  Future<bool> audioOutput(String deviceId) async {
+    try {
+      await Helper.selectAudioOutput(deviceId);
+    } catch (e) {
+      print('Helper.selectAudioOutput ${e.toString()}');
+      return false;
+    }
+    return true;
   }
 }
diff --git a/pubspec.yaml b/pubspec.yaml
index 8a70bfc5bb..786dd93a36 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -7,11 +7,11 @@ environment:
   flutter: '>=1.22.0'

 dependencies:
-  dart_webrtc: ^1.0.6
+  dart_webrtc: ^1.0.7
   flutter:
     sdk: flutter
   path_provider: ^2.0.2
-  webrtc_interface: ^1.0.5
+  webrtc_interface: ^1.0.7

 dev_dependencies:
   flutter_test:
diff --git a/third_party/libwebrtc/include/rtc_audio_device.h b/third_party/libwebrtc/include/rtc_audio_device.h
index 95383d7cd4..6ae365c57f 100644
--- a/third_party/libwebrtc/include/rtc_audio_device.h
+++ b/third_party/libwebrtc/include/rtc_audio_device.h
@@ -6,6 +6,8 @@ namespace libwebrtc {

 class RTCAudioDevice : public RefCountInterface {
+ public:
+  typedef fixed_size_function<void()> OnDeviceChangeCallback;
  public:
   static const int kAdmMaxDeviceNameSize = 128;
   static const int kAdmMaxFileNameSize = 512;
@@ -30,6 +32,8 @@ class RTCAudioDevice : public RefCountInterface {

   virtual int32_t SetRecordingDevice(uint16_t index) = 0;

+  virtual int32_t OnDeviceChange(OnDeviceChangeCallback listener) = 0;
+
  protected:
   virtual ~RTCAudioDevice() {}
 };
diff --git a/third_party/libwebrtc/lib/win/x64/libwebrtc.dll b/third_party/libwebrtc/lib/win/x64/libwebrtc.dll
index 4ccaf9fe31..cec10df062 100644
Binary files a/third_party/libwebrtc/lib/win/x64/libwebrtc.dll and b/third_party/libwebrtc/lib/win/x64/libwebrtc.dll differ
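Taken together, the Dart-facing result of this change set: the per-track `enableSpeakerphone()` is deprecated in favor of a global `Helper.setSpeakerphoneOn()`, and `RTCVideoRenderer.audioOutput()` now resolves instead of throwing. A short closing sketch under those assumptions; `toggleSpeaker` and `pickRendererOutput` are hypothetical helpers:

```dart
import 'package:flutter_webrtc/flutter_webrtc.dart';

Future<void> toggleSpeaker(bool on) async {
  // Replaces the deprecated MediaStreamTrack.enableSpeakerphone();
  // the toggle is global rather than tied to a single track.
  await Helper.setSpeakerphoneOn(on);
}

Future<void> pickRendererOutput(RTCVideoRenderer renderer, String deviceId) async {
  // audioOutput() returns false if the platform call fails,
  // e.g. when the device id is unknown to the native side.
  final ok = await renderer.audioOutput(deviceId);
  if (!ok) {
    print('selectAudioOutput failed for $deviceId');
  }
}
```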