8000 [WIP] Add selectAudioOutput method. by cloudwebrtc · Pull Request #1030 · flutter-webrtc/flutter-webrtc · GitHub
[go: up one dir, main page]

Skip to content

[WIP] Add selectAudioOutput method. #1030

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 31 commits into from
Aug 22, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
472eeaa
chore: Add selectAudioOutput method.
cloudwebrtc Jul 29, 2022
f67f401
cleanup.
cloudwebrtc Jul 29, 2022
e941e12
update.
cloudwebrtc Jul 29, 2022
c772588
bump version.
cloudwebrtc Aug 1, 2022
d584119
feat: selectAudioOutput support for macOS.
cloudwebrtc Aug 1, 2022
538ca4e
Android audio outputs and audio selection
davidliu Aug 1, 2022
0f411a8
chore: code format.
cloudwebrtc Aug 3, 2022
21e1ab2
chore: update loopback sample.
cloudwebrtc Aug 3, 2022
0a87072
update webrtc-interface.
cloudwebrtc Aug 8, 2022
e8ef0ec
Fix compilation errors for iOS.
cloudwebrtc Aug 8, 2022
c6ba324
Audio device enumeration for iOS.
cloudwebrtc Aug 8, 2022
5b1f1a6
Merge branch 'main' into feat/select-audio-output
cloudwebrtc Aug 8, 2022
0d45de2
Merge branch 'main' into feat/select-audio-output
cloudwebrtc Aug 9, 2022
86dd6f7
Add mediaDeviceEvent for MediaDeviceNative.
cloudwebrtc Aug 14, 2022
7a0d1f2
feat: Implemented ondevicechange events for Windows.
cloudwebrtc Aug 14, 2022
601e2da
chore: feat: Implemented ondevicechange events for Android.
cloudwebrtc Aug 15, 2022
0cc6b4b
Tidy up the code.
cloudwebrtc Aug 15, 2022
32fdb18
feat: Implemented ondevicechange events for macOS.
cloudwebrtc Aug 15, 2022
ccd8079
chore: Use the unified Events channel to transmit the ondevicechange …
cloudwebrtc Aug 15, 2022
24e681b
chore: Use the unified Events channel to transmit the ondevicechange …
cloudwebrtc Aug 15, 2022
cb5561e
Remove unused import.
cloudwebrtc Aug 15, 2022
c7409da
feat: Implemented ondevicechange events for iOS.
cloudwebrtc Aug 15, 2022
2a9dab2
Replace AVAudioSession with RTCAudioSession.
cloudwebrtc Aug 15, 2022
1dac603
Add setPreferredInput/setPreferredOutput.
cloudwebrtc Aug 19, 2022
4f55441
Implementation returns a list of audio sources,imp fun setPreferredIn…
zjzhang-cn Aug 19, 2022
a29d2ff
update.
cloudwebrtc Aug 19, 2022
2f3eef0
Merge branch 'main' into feat/select-audio-output
cloudwebrtc Aug 19, 2022
b0cf535
Add SetPreferredInput for win.
cloudwebrtc Aug 19, 2022
ac9f0ab
update.
cloudwebrtc Aug 19, 2022
264bf6d
update.
cloudwebrtc Aug 19, 2022
9205ee4
fix compile error for mac.
cloudwebrtc Aug 19, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Changelog

--------------------------------------------

[0.9.3] - 2022-08-15

* [Windows/macOS] Fix UI freeze when getting thumbnails.
Expand All @@ -11,6 +12,11 @@
* [iOS/macOS] update WebRTC-SDK to 104.5112.02.
* [Windows] update libwebrtc.dll to 104.5112.02.

[0.9.1] - 2022-08-01

* [iOS] fix: iOS app could not change camera resolutions, caused by wrong datatype in the video Constraints.
* [Darwin] bump version for .podspec.

[0.9.0] - 2022-07-27

* [macOS] Added screen-sharing support for macOS
Expand Down
1 change: 1 addition & 0 deletions android/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ android {

dependencies {
implementation 'com.github.webrtc-sdk:android:104.5112.01'
implementation "com.twilio:audioswitch:1.1.5"
implementation 'androidx.annotation:annotation:1.1.0'
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@

import org.webrtc.EglBase;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.RendererCommon.RendererEvents;
import org.webrtc.VideoTrack;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,37 +5,41 @@
import android.content.Context;
import android.os.Bundle;
import android.util.Log;

import androidx.annotation.NonNull;
import androidx.lifecycle.DefaultLifecycleObserver;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleOwner;
import com.cloudwebrtc.webrtc.MethodCallHandlerImpl.AudioManager;
import com.cloudwebrtc.webrtc.utils.RTCAudioManager;
import io.flutter.embedding.android.FlutterActivity;

import com.cloudwebrtc.webrtc.audio.AudioSwitchManager;
import com.cloudwebrtc.webrtc.utils.AnyThreadSink;
import com.cloudwebrtc.webrtc.utils.ConstraintsMap;

import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.embedding.engine.plugins.activity.ActivityAware;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.embedding.engine.plugins.lifecycle.HiddenLifecycleReference;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.PluginRegistry.Registrar;
import io.flutter.view.TextureRegistry;

import java.util.Set;

/**
* FlutterWebRTCPlugin
*/
public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware {
public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventChannel.StreamHandler {

static public final String TAG = "FlutterWebRTCPlugin";
private static Application application;

private RTCAudioManager rtcAudioManager;
private MethodChannel channel;
private AudioSwitchManager audioSwitchManager;
private MethodChannel methodChannel;
private MethodCallHandlerImpl methodCallHandler;
private LifeCycleObserver observer;
private Lifecycle lifecycle;
private EventChannel eventChannel;
public EventChannel.EventSink eventSink;

public FlutterWebRTCPlugin() {
}
Expand Down Expand Up @@ -103,62 +107,48 @@ public void onDetachedFromActivity() {

private void startListening(final Context context, BinaryMessenger messenger,
TextureRegistry textureRegistry) {
audioSwitchManager = new AudioSwitchManager(context);
methodCallHandler = new MethodCallHandlerImpl(context, messenger, textureRegistry,
new AudioManager() {
@Override
public void onAudioManagerRequested(boolean requested) {
if (requested) {
if (rtcAudioManager == null) {
rtcAudioManager = RTCAudioManager.create(context);
}
rtcAudioManager.start(FlutterWebRTCPlugin.this::onAudioManagerDevicesChanged);
} else {
if (rtcAudioManager != null) {
rtcAudioManager.stop();
rtcAudioManager = null;
}
}
}

@Override
public void setMicrophoneMute(boolean mute) {
if (rtcAudioManager != null) {
rtcAudioManager.setMicrophoneMute(mute);
}
}

@Override
public void setSpeakerphoneOn(boolean on) {
if (rtcAudioManager != null) {
rtcAudioManager.setSpeakerphoneOn(on);
}
}
});

channel = new MethodChannel(messenger, "FlutterWebRTC.Method");
channel.setMethodCallHandler(methodCallHandler);
audioSwitchManager);
methodChannel = new MethodChannel(messenger, "FlutterWebRTC.Method");
methodChannel.setMethodCallHandler(methodCallHandler);
eventChannel = new EventChannel( messenger,"FlutterWebRTC.Event");
eventChannel.setStreamHandler(this);
audioSwitchManager.audioDeviceChangeListener = (devices, currentDevice) -> {
Log.w(TAG, "audioFocusChangeListener " + devices+ " " + currentDevice);
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onDeviceChange");
sendEvent(params.toMap());
return null;
};
audioSwitchManager.start();
}

private void stopListening() {
methodCallHandler.dispose();
methodCallHandler = null;
channel.setMethodCallHandler(null);

if (rtcAudioManager != null) {
methodChannel.setMethodCallHandler(null);
eventChannel.setStreamHandler(null);
if (audioSwitchManager != null) {
Log.d(TAG, "Stopping the audio manager...");
rtcAudioManager.stop();
rtcAudioManager = null;
audioSwitchManager.stop();
audioSwitchManager = null;
}
}

// This method is called when the audio manager reports audio device change,
// e.g. from wired headset to speakerphone.
private void onAudioManagerDevicesChanged(
final RTCAudioManager.AudioDevice device,
final Set<RTCAudioManager.AudioDevice> availableDevices) {
Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
+ "selected: " + device);
// TODO(henrika): add callback handler.
@Override
public void onListen(Object arguments, EventChannel.EventSink events) {
eventSink = new AnyThreadSink(events);
}
@Override
public void onCancel(Object arguments) {
eventSink = null;
}

public void sendEvent(Object event) {
if(eventSink != null) {
eventSink.success(event);
}
}

private class LifeCycleObserver implements Application.ActivityLifecycleCallbacks, DefaultLifecycleObserver {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1065,10 +1065,10 @@ public class VideoCapturerInfo {
}

@RequiresApi(api = VERSION_CODES.M)
void setPreferredInputDevice(int i){
void setPreferredInputDevice(int i) {
android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE));
final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS);
if (devices.length>i){
if (devices.length > i) {
audioDeviceModule.setPreferredInputDevice(devices[i]);
}
}
Expand Down
B41A
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;

import com.cloudwebrtc.webrtc.audio.AudioDeviceKind;
import com.cloudwebrtc.webrtc.audio.AudioSwitchManager;
import com.cloudwebrtc.webrtc.record.AudioChannel;
import com.cloudwebrtc.webrtc.record.FrameCapturer;
import com.cloudwebrtc.webrtc.utils.AnyThreadResult;
Expand All @@ -28,6 +30,8 @@
import com.cloudwebrtc.webrtc.utils.ObjectType;
import com.cloudwebrtc.webrtc.utils.PermissionUtils;

import com.twilio.audioswitch.AudioDevice;

import org.webrtc.AudioTrack;
import org.webrtc.CryptoOptions;
import org.webrtc.DefaultVideoDecoderFactory;
Expand Down Expand Up @@ -81,17 +85,6 @@
import io.flutter.view.TextureRegistry.SurfaceTextureEntry;

public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
interface AudioManager {

void onAudioManagerRequested(boolean requested);

void setMicrophoneMute(boolean mute);

void setSpeakerphoneOn(boolean on);


}

static public final String TAG = "FlutterWebRTCPlugin";

private final Map<String, PeerConnectionObserver> mPeerConnectionObservers = new HashMap<>();
Expand All @@ -112,18 +105,18 @@ interface AudioManager {
*/
private GetUserMediaImpl getUserMediaImpl;

private final AudioManager audioManager;
private final AudioSwitchManager audioSwitchManager;

private AudioDeviceModule audioDeviceModule;

private Activity activity;

MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry,
@NonNull AudioManager audioManager) {
@NonNull AudioSwitchManager audioManager) {
this.context = context;
this.textures = textureRegistry;
this.messenger = messenger;
this.audioManager = audioManager;
this.audioSwitchManager = audioManager;
}

static private void resultError(String method, String error, Result result) {
Expand Down Expand Up @@ -472,14 +465,29 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
result.success(null);
break;
}
case "selectAudioOutput": {
String deviceId = call.argument("deviceId");
audioSwitchManager.selectAudioOutput(AudioDeviceKind.fromTypeName(deviceId));
result.success(null);
break;
}
case "setMicrophoneMute":
boolean mute = call.argument("mute");
audioManager.setMicrophoneMute(mute);
audioSwitchManager.setMicrophoneMute(mute);
result.success(null);
break;
case "selectAudioInput":
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) {
String deviceId = call.argument("deviceId");
getUserMediaImpl.setPreferredInputDevice(Integer.parseInt(deviceId));
result.success(null);
} else {
result.notImplemented();
}
break;
case "enableSpeakerphone":
boolean enable = call.argument("enable");
audioManager.setSpeakerphoneOn(enable);
audioSwitchManager.enableSpeakerphone(enable);
result.success(null);
break;
case "getDisplayMedia": {
Expand Down Expand Up @@ -980,14 +988,14 @@ public String peerConnectionInit(ConstraintsMap configuration, ConstraintsMap co
if (mPeerConnectionObservers.size() == 0) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S
|| context.getApplicationInfo().targetSdkVersion < Build.VERSION_CODES.S) {
audioManager.onAudioManagerRequested(true);
//audioSwitchManager.start();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks strange that this if executes nothing. I don't know if we need to start the manager or not, but having an if that does nothing seems useless.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I rechecked the code; this piece should be able to be removed.
audioSwitchManager has already called start when it is created, and start is only used to monitor device changes, so it will not take over the audio route.

Copy link
@fcancela fcancela Sep 20, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this Bluetooth permission necessary?

...
        ArrayList<String> permissions = new ArrayList<>();
        permissions.add(Manifest.permission.BLUETOOTH_CONNECT);
        requestPermissions(
                permissions,
                (args) -> {
                  //audioSwitchManager.start();
                },
                (args) -> {
                });
      }
    }
...

I'm being asked for access to nearby devices, but from my understanding this shouldn't be required. My app does not require Bluetooth.

Can it be removed? If not, is there a way to prevent this line from being called?

Thanks in advance!

} else {
ArrayList<String> permissions = new ArrayList<>();
permissions.add(Manifest.permission.BLUETOOTH_CONNECT);
requestPermissions(
permissions,
(args) -> {
audioManager.onAudioManagerRequested(true);
//audioSwitchManager.start();
},
(args) -> {
});
Expand Down Expand Up @@ -1143,22 +1151,36 @@ public void getSources(Result result) {
audio.putString("kind", "audioinput");
array.pushMap(audio);
} else {
android.media.AudioManager audioManager = ((android.media.AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
android.media.AudioManager audioManager = ((android.media.AudioManager) context
.getSystemService(Context.AUDIO_SERVICE));
final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS);
for (int i=0;i<devices.length;i++) {
AudioDeviceInfo device=devices[i];
int type = (device.getType() & 0xFF);
String label=Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? String.valueOf(i) : device.getAddress();
ConstraintsMap audio = new ConstraintsMap();
audio.putString("label", label);
audio.putString("deviceId", String.valueOf(i));
audio.putString("groupId", ""+type);
audio.putString("facing", "");
audio.putString("kind", "audioinput");
array.pushMap(audio);
for (int i = 0; i < devices.length; i++) {
AudioDeviceInfo device = devices[i];
if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC || device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO ||
device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
int type = (device.getType() & 0xFF);
String label = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? String.valueOf(i) : device.getAddress();
ConstraintsMap audio = new ConstraintsMap();
audio.putString("label", label);
audio.putString("deviceId", String.valueOf(i));
audio.putString("groupId", "" + type);
audio.putString("facing", "");
audio.putString("kind", "audioinput");
array.pushMap(audio);
}
}
}

List<? extends AudioDevice> audioOutputs = audioSwitchManager.availableAudioDevices();

for (AudioDevice audioOutput : audioOutputs) {
ConstraintsMap audioOutputMap = new ConstraintsMap();
audioOutputMap.putString("label", audioOutput.getName());
audioOutputMap.putString("deviceId", AudioDeviceKind.fromAudioDevice(audioOutput).typeName);
audioOutputMap.putString("facing", "");
audioOutputMap.putString("kind", "audiooutput");
array.pushMap(audioOutputMap);
}

ConstraintsMap map = new ConstraintsMap();
map.putArray("sources", array.toArrayList());
Expand Down Expand Up @@ -1545,7 +1567,7 @@ public void peerConnectionDispose(final String id) {
mPeerConnectionObservers.remove(id);
}
if (mPeerConnectionObservers.size() == 0) {
audioManager.onAudioManagerRequested(false);
//audioSwitchManager.stop();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same as my previous comments @cloudwebrtc

}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package com.cloudwebrtc.webrtc;

import android.util.Log;
import android.util.SparseArray;
import androidx.annotation.Nullable;
import com.cloudwebrtc.webrtc.utils.AnyThreadSink;
import com.cloudwebrtc.webrtc.utils.ConstraintsArray;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.cloudwebrtc.webrtc

import android.util.Log
import org.webrtc.*
import java.util.concurrent.Callable
import java.util.concurrent.ExecutorService
Expand Down
Loading
0