diff --git a/.gitignore b/.gitignore
index e2fafa6ebf..128f9690dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
.DS_Store
.packages
.vscode/.DS_Store
+.dart_tool/
example/pubspec.lock
pubspec.lock
example/ios/Podfile.lock
@@ -13,3 +14,21 @@ example/android/gradle*
WorkspaceSettings.xcsettings
example/.flutter-plugins
example/android/local.properties
+android/.gradle
+android/.settings
+android/gradle
+android/gradlew
+android/gradlew.bat
+android/local.properties
+example/android/.settings
+example/.flutter-plugins-dependencies
+example/android/.project/
+example/android/app/.classpath
+example/ios/Flutter/flutter_export_environment.sh
+example/ios/Flutter/Generated.xcconfig
+example/ios/Pods/.symlinks
+example/ios/Pods/Local Podspecs/
+example/ios/Runner/GeneratedPluginRegistrant.h
+example/ios/Runner/GeneratedPluginRegistrant.m
+example/ios/Runner.xcworkspace/xcuserdata/
+example/android/.project
diff --git a/android/.classpath b/android/.classpath
new file mode 100644
index 0000000000..eb19361b57
--- /dev/null
+++ b/android/.classpath
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/android/.project b/android/.project
new file mode 100644
index 0000000000..ee0380c092
--- /dev/null
+++ b/android/.project
@@ -0,0 +1,23 @@
+
+
+ flutter_webrtc
+ Project android_ created by Buildship.
+
+
+
+
+ org.eclipse.jdt.core.javabuilder
+
+
+
+
+ org.eclipse.buildship.core.gradleprojectbuilder
+
+
+
+
+
+ org.eclipse.jdt.core.javanature
+ org.eclipse.buildship.core.gradleprojectnature
+
+
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
index 855f02ca8b..5a15b6a0b1 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
@@ -421,8 +421,7 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
}
} else if (call.method.equals("stopRecordToFile")) {
Integer recorderId = call.argument("recorderId");
- getUserMediaImpl.stopRecording(recorderId);
- result.success(null);
+ getUserMediaImpl.stopRecording(recorderId, result);
} else if (call.method.equals("captureFrame")) {
String path = call.argument("path");
String videoTrackId = call.argument("trackId");
@@ -477,6 +476,10 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
String kind = call.argument("kind");
String streamId = call.argument("streamId");
createSender(peerConnectionId, kind, streamId, result);
+ } else if (call.method.equals("closeSender")) {
+ String peerConnectionId = call.argument("peerConnectionId");
+ String senderId = call.argument("senderId");
+ stopSender(peerConnectionId, senderId, result);
} else if (call.method.equals("addTrack")) {
String peerConnectionId = call.argument("peerConnectionId");
String trackId = call.argument("trackId");
@@ -1000,7 +1003,7 @@ private void mediaStreamTrackStop(final String id) {
private void mediaStreamTrackSetEnabled(final String id, final boolean enabled) {
MediaStreamTrack track = getTrackForId(id);
if (track == null) {
- Log.d(TAG, "mediaStreamTrackSetEnabled() track is null");
+ Log.d(TAG, "mediaStreamTrackSetEnabled() track is null " + id);
return;
} else if (track.enabled() == enabled) {
return;
@@ -1386,8 +1389,21 @@ private void createSender(String peerConnectionId, String kind, String streamId,
}
}
- private void addTrack(String peerConnectionId, String trackId, List streamIds, Result result) {
- PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);
+
+ private void stopSender(String peerConnectionId, String senderId, Result result) {
+ PeerConnectionObserver pco
+ = mPeerConnectionObservers.get(peerConnectionId);
+ if (pco == null || pco.getPeerConnection() == null) {
+            Log.d(TAG, "closeSender() peerConnection is null");
+            result.error("closeSender", "closeSender() peerConnection is null", null);
+ } else {
+ pco.closeSender(senderId, result);
+ }
+ }
+
+    private void addTrack(String peerConnectionId, String trackId, List<String> streamIds, Result result) {
+ PeerConnectionObserver pco
+ = mPeerConnectionObservers.get(peerConnectionId);
MediaStreamTrack track = localTracks.get(trackId);
if (track == null) {
result.error("addTrack", "addTrack() track is null", null);
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
index d4dd8a54ff..3420c17936 100755
--- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
@@ -585,6 +585,7 @@ private void getUserMedia(
}
String id = track.id();
+ Log.d(TAG, "MediaStream Track id: " + id);
if (track instanceof AudioTrack) {
mediaStream.addTrack((AudioTrack) track);
@@ -792,19 +793,11 @@ else if (audioChannel == AudioChannel.OUTPUT) {
mediaRecorders.append(id, mediaRecorder);
}
- void stopRecording(Integer id) {
+ void stopRecording(Integer id, Result result) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
- mediaRecorder.stopRecording();
+ mediaRecorder.stopRecording(result);
mediaRecorders.remove(id);
- File file = mediaRecorder.getRecordFile();
- if (file != null) {
- ContentValues values = new ContentValues(3);
- values.put(MediaStore.Video.Media.TITLE, file.getName());
- values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
- values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
- applicationContext.getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
- }
}
}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java
index 0db36fe53c..c061ffeac4 100755
--- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java
@@ -92,8 +92,8 @@ void close() {
remoteTracks.clear();
dataChannels.clear();
transceivers.clear();
- senders.clear();;
- receivers.clear();;
+ senders.clear();
+ receivers.clear();
}
void dispose() {
@@ -641,6 +641,14 @@ public void createSender(String kind, String streamId, Result result){
result.success(rtpSenderToMap(sender));
}
+ public void closeSender(String senderId, Result result) {
+ RtpSender sender = senders.get(senderId);
+        if (sender != null) sender.dispose();
+        Map<String, Object> params = new HashMap<>();
+ params.put("result", true);
+ result.success(params);
+ }
+
public void addTrack(MediaStreamTrack track, List streamIds, Result result){
RtpSender sender = peerConnection.addTrack(track, streamIds);
senders.put(sender.id(),sender);
@@ -654,8 +662,8 @@ public void removeTrack(String senderId, Result result){
return;
}
boolean res = peerConnection.removeTrack(sender);
- ConstraintsMap params = new ConstraintsMap();
- params.putBoolean("result", res);
+        Map<String, Object> params = new HashMap<>();
+ params.put("result", res);
result.success(params);
}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioFileRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioFileRenderer.java
new file mode 100644
index 0000000000..9158cc7bab
--- /dev/null
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioFileRenderer.java
@@ -0,0 +1,153 @@
+package com.cloudwebrtc.webrtc.record;
+
+import org.webrtc.audio.JavaAudioDeviceModule;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+
+import io.flutter.plugin.common.MethodChannel;
+
+import static android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+
+public class AudioFileRenderer implements JavaAudioDeviceModule.SamplesReadyCallback {
+ private static final String TAG = "AudioFileRenderer";
+ private boolean isRunning = true;
+
+ private final HandlerThread audioThread;
+ private final Handler audioThreadHandler;
+ private MediaCodec audioEncoder;
+ private ByteBuffer[] audioInputBuffers;
+ private ByteBuffer[] audioOutputBuffers;
+ private MediaCodec.BufferInfo audioBufferInfo;
+ private MediaMuxer mediaMuxer;
+ private int trackIndex = -1;
+ private int audioTrackIndex = -1;
+ private long presTime = 0L;
+ private volatile boolean muxerStarted = false;
+
+ AudioFileRenderer(File outputFile) throws IOException {
+ audioThread = new HandlerThread(TAG + "AudioThread");
+ audioThread.start();
+ audioThreadHandler = new Handler(audioThread.getLooper());
+
+ mediaMuxer = new MediaMuxer(outputFile.getPath(),
+ MUXER_OUTPUT_MPEG_4);
+ }
+
+ @Override
+ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
+ audioThreadHandler.post(() -> {
+ if (audioEncoder == null) try {
+ audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
+ MediaFormat format = new MediaFormat();
+ format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
+ format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
+ format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
+ format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
+ format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+ audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+ audioEncoder.start();
+ audioInputBuffers = audioEncoder.getInputBuffers();
+ audioOutputBuffers = audioEncoder.getOutputBuffers();
+ } catch (IOException exception) {
+ Log.wtf(TAG, exception);
+ }
+ int bufferIndex = audioEncoder.dequeueInputBuffer(0);
+ if (bufferIndex >= 0) {
+ ByteBuffer buffer = audioInputBuffers[bufferIndex];
+ buffer.clear();
+ byte[] data = audioSamples.getData();
+ buffer.put(data);
+ audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
+ presTime += data.length * 125 / 12; // 1000000 microseconds / 48000hz / 2 bytes
+ }
+ drainAudio();
+ });
+ }
+
+ private void drainAudio() {
+ if (audioBufferInfo == null)
+ audioBufferInfo = new MediaCodec.BufferInfo();
+ while (true) {
+ int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
+ if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ break;
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ // not expected for an encoder
+ audioOutputBuffers = audioEncoder.getOutputBuffers();
+ Log.d(TAG, "encoder output buffers changed");
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // not expected for an encoder
+ MediaFormat newFormat = audioEncoder.getOutputFormat();
+
+ Log.d(TAG, "encoder output format changed: " + newFormat);
+ audioTrackIndex = mediaMuxer.addTrack(newFormat);
+ if (audioTrackIndex != -1 && !muxerStarted) {
+ mediaMuxer.start();
+ muxerStarted = true;
+ }
+ if (!muxerStarted)
+ break;
+ } else if (encoderStatus < 0) {
+                Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
+ } else { // encoderStatus >= 0
+ try {
+ ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
+ if (encodedData == null) {
+ Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
+ break;
+ }
+ // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
+ encodedData.position(audioBufferInfo.offset);
+ encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
+ if (muxerStarted)
+ mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
+ isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
+ audioEncoder.releaseOutputBuffer(encoderStatus, false);
+ if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ break;
+ }
+ } catch (Exception e) {
+ Log.wtf(TAG, e);
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Release all resources. All already posted frames will be rendered first.
+ */
+ void release(MethodChannel.Result result) {
+ isRunning = false;
+ if (audioThreadHandler != null)
+ audioThreadHandler.post(() -> {
+ if (audioEncoder != null) {
+ audioEncoder.stop();
+ audioEncoder.release();
+ }
+ try {
+ mediaMuxer.stop();
+ mediaMuxer.release();
+ } catch (Exception e) {
+ // do nothing...
+ }
+ audioThread.quit();
+
+ result.success(null);
+ });
+ }
+}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java
index ddc4d1ff2f..111ddb4090 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java
@@ -1,20 +1,27 @@
package com.cloudwebrtc.webrtc.record;
import android.annotation.SuppressLint;
+import android.util.Log;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioSamples;
-import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
/** JavaAudioDeviceModule allows attaching samples callback only on building
* We don't want to instantiate VideoFileRenderer and codecs at this step
* It's simple dummy class, it does nothing until samples are necessary */
@SuppressWarnings("WeakerAccess")
public class AudioSamplesInterceptor implements SamplesReadyCallback {
-
+ public static int id = 0;
+ private int _id;
+ private static final String TAG = "AudioSamplesInterceptor";
@SuppressLint("UseSparseArrays")
- protected final HashMap callbacks = new HashMap<>();
+    protected final ConcurrentHashMap<Integer, SamplesReadyCallback> callbacks = new ConcurrentHashMap<>();
+
+ public AudioSamplesInterceptor() {
+ this._id = id++;
+ }
@Override
public void onWebRtcAudioRecordSamplesReady(AudioSamples audioSamples) {
@@ -25,10 +32,12 @@ public void onWebRtcAudioRecordSamplesReady(AudioSamples audioSamples) {
public void attachCallback(Integer id, SamplesReadyCallback callback) throws Exception {
callbacks.put(id, callback);
+ Log.d(TAG, _id + " Attached callback "+callbacks.size());
}
public void detachCallback(Integer id) {
callbacks.remove(id);
+ Log.d(TAG, _id + " Detached callback "+callbacks.size());
}
}
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java
index f1c45357bc..67fa770a40 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java
@@ -9,12 +9,15 @@
import java.io.File;
+import io.flutter.plugin.common.MethodChannel;
+
public class MediaRecorderImpl {
private final Integer id;
private final VideoTrack videoTrack;
private final AudioSamplesInterceptor audioInterceptor;
private VideoFileRenderer videoFileRenderer;
+ private AudioFileRenderer audioFileRenderer;
private boolean isRunning = false;
private File recordFile;
@@ -43,18 +46,20 @@ public void startRecording(File file) throws Exception {
} else {
Log.e(TAG, "Video track is null");
if (audioInterceptor != null) {
- //TODO(rostopira): audio only recording
- throw new Exception("Audio-only recording not implemented yet");
+ audioFileRenderer = new AudioFileRenderer(file);
+ audioInterceptor.attachCallback(id, audioFileRenderer);
}
}
}
public File getRecordFile() { return recordFile; }
- public void stopRecording() {
+ public void stopRecording(MethodChannel.Result result) {
isRunning = false;
if (audioInterceptor != null)
audioInterceptor.detachCallback(id);
+        if (audioFileRenderer != null) { audioFileRenderer.release(result); } else { result.success(null); }
+ audioFileRenderer = null;
if (videoTrack != null && videoFileRenderer != null) {
videoTrack.removeSink(videoFileRenderer);
videoFileRenderer.release();
diff --git a/example/android/gradle.properties b/example/android/gradle.properties
new file mode 100644
index 0000000000..7be3d8b468
--- /dev/null
+++ b/example/android/gradle.properties
@@ -0,0 +1,2 @@
+org.gradle.jvmargs=-Xmx1536M
+android.enableR8=true
diff --git a/ios/Classes/FlutterRTCAudioRecorder.h b/ios/Classes/FlutterRTCAudioRecorder.h
new file mode 100644
index 0000000000..ce8a9131a5
--- /dev/null
+++ b/ios/Classes/FlutterRTCAudioRecorder.h
@@ -0,0 +1,30 @@
+
+//
+// FlutterRTCAudioRecorder.h
+// Pods
+//
+// Created by Yonatan Naor on 21/03/2020.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef void (^ OnRecordingStopped)(bool);
+
+@interface FlutterRTCAudioRecorder : NSObject
+
+-(id) initWithPath:(NSString *) path;
+
+-(void)stop:(OnRecordingStopped) callback;
+
+-(void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
+ successfully:(BOOL)flag;
+
+-(void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder
+ error:(NSError *)error;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/FlutterRTCAudioRecorder.m b/ios/Classes/FlutterRTCAudioRecorder.m
new file mode 100644
index 0000000000..4b8009dafb
--- /dev/null
+++ b/ios/Classes/FlutterRTCAudioRecorder.m
@@ -0,0 +1,70 @@
+
+//
+// FlutterRTCAudioRecorder.m
+// Pods
+//
+// Created by Yonatan Naor on 21/03/2020.
+//
+
+#import "FlutterRTCAudioRecorder.h"
+#import <AVFoundation/AVFoundation.h>
+
+AVAudioSession* recordingSession;
+AVAudioRecorder *audioRecorder;
+
+@implementation FlutterRTCAudioRecorder
+
+OnRecordingStopped onRecordingStopped;
+
+-(id) initWithPath:(NSString *) path {
+ self = [super init];
+ recordingSession = [AVAudioSession sharedInstance];
+ [recordingSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
+ [recordingSession setActive:true error:nil];
+
+ [recordingSession requestRecordPermission:^(BOOL granted) {
+ if (granted) {
+ NSError* error;
+ NSDictionary* recordingSettings = @{
+ @"AVFormatIDKey": [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
+ @"AVSampleRateKey": @12000,
+ @"AVNumberOfChannelsKey": @1,
+ @"AVEncoderAudioQualityKey": [NSNumber numberWithInt:AVAudioQualityMedium]
+ };
+ audioRecorder = [[AVAudioRecorder alloc] initWithURL:[NSURL URLWithString:path] settings: recordingSettings error:&error];
+
+ audioRecorder.delegate = self;
+
+ [audioRecorder prepareToRecord];
+ [audioRecorder record];
+ } else {
+ @throw [NSException
+                   exceptionWithName:@"RecordingPermissionDenied"
+ reason:@"Recording permission not granted"
+ userInfo:nil];
+ }
+ }];
+ return self;
+}
+
+-(void)stop:(OnRecordingStopped) callback {
+ onRecordingStopped = callback;
+ if (audioRecorder != nil) {
+ [audioRecorder stop];
+ }
+}
+
+-(void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
+ successfully:(BOOL)flag {
+ if (onRecordingStopped != nil) {
+ onRecordingStopped(flag);
+ }
+ onRecordingStopped = nil;
+}
+
+-(void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder
+ error:(NSError *)error {
+ NSLog(@"Encode Error occurred");
+}
+
+@end
diff --git a/ios/Classes/FlutterWebRTCPlugin.h b/ios/Classes/FlutterWebRTCPlugin.h
index 3be67f3518..e0cb71ab27 100644
--- a/ios/Classes/FlutterWebRTCPlugin.h
+++ b/ios/Classes/FlutterWebRTCPlugin.h
@@ -15,6 +15,7 @@
@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory;
@property (nonatomic, strong) NSMutableDictionary *peerConnections;
@property (nonatomic, strong) NSMutableDictionary *localStreams;
+@property (nonatomic, strong) NSMutableDictionary *localSenders;
@property (nonatomic, strong) NSMutableDictionary *localTracks;
@property (nonatomic, strong) NSMutableDictionary *renders;
@property (nonatomic, retain) UIViewController *viewController;/*for broadcast or ReplayKit */
diff --git a/ios/Classes/FlutterWebRTCPlugin.m b/ios/Classes/FlutterWebRTCPlugin.m
index 4c66247e7e..f2a104cc8f 100644
--- a/ios/Classes/FlutterWebRTCPlugin.m
+++ b/ios/Classes/FlutterWebRTCPlugin.m
@@ -3,6 +3,7 @@
#import "FlutterRTCMediaStream.h"
#import "FlutterRTCPeerConnection.h"
#import "FlutterRTCVideoRenderer.h"
+#import "FlutterRTCAudioRecorder.h"
#import
#import
@@ -17,6 +18,8 @@ @implementation FlutterWebRTCPlugin {
@synthesize messenger = _messenger;
+FlutterRTCAudioRecorder* flutterRTCAudioRecorder;
+
+ (void)registerWithRegistrar:(NSObject *)registrar {
FlutterMethodChannel *channel =
@@ -61,6 +64,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel *)channel
self.peerConnections = [NSMutableDictionary new];
self.localStreams = [NSMutableDictionary new];
self.localTracks = [NSMutableDictionary new];
+ self.localSenders = [NSMutableDictionary new];
self.renders = [[NSMutableDictionary alloc] init];
[[NSNotificationCenter defaultCenter]
@@ -436,27 +440,39 @@ - (void)handleMethodCall:(FlutterMethodCall *)call
details:nil]);
}
result(nil);
- } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]) {
- NSDictionary *argsMap = call.arguments;
- NSString *streamId = argsMap[@"streamId"];
- NSString *trackId = argsMap[@"trackId"];
- RTCMediaStream *stream = self.localStreams[streamId];
- if (stream) {
- RTCMediaStreamTrack *track = self.localTracks[trackId];
- if (track != nil) {
- if ([track isKindOfClass:[RTCAudioTrack class]]) {
- RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
- [stream removeAudioTrack:audioTrack];
- } else if ([track isKindOfClass:[RTCVideoTrack class]]) {
- RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
- [stream removeVideoTrack:videoTrack];
- }
+ } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]){
+ NSDictionary* argsMap = call.arguments;
+ NSString* streamId = argsMap[@"streamId"];
+ NSString* trackId = argsMap[@"trackId"];
+ RTCMediaStream *stream = self.localStreams[streamId];
+ if (stream) {
+ RTCMediaStreamTrack *track = self.localTracks[trackId];
+ if(track != nil) {
+ if([track isKindOfClass:[RTCAudioTrack class]]) {
+ RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
+ [stream removeAudioTrack:audioTrack];
+ } else if ([track isKindOfClass:[RTCVideoTrack class]]){
+ RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
+ [stream removeVideoTrack:videoTrack];
+ }
+ } else {
+ result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" message:nil details:nil]);
+ }
} else {
- result([FlutterError
- errorWithCode:@"mediaStreamRemoveTrack: Track is nil"
- message:nil
- details:nil]);
+ result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" message:nil details:nil]);
}
+ result(nil);
+ } else if([@"startRecordToFile" isEqualToString:call.method]){
+ NSDictionary* argsMap = call.arguments;
+ NSString* path = argsMap[@"path"];
+
+ flutterRTCAudioRecorder = [[FlutterRTCAudioRecorder alloc] initWithPath:path];
+ result(nil);
+ } else if([@"stopRecordToFile" isEqualToString:call.method]){
+ if (flutterRTCAudioRecorder != nil) {
+ [flutterRTCAudioRecorder stop:^(bool flag) {
+ result([NSNumber numberWithBool:(BOOL)flag]);
+ }];
} else {
result([FlutterError
errorWithCode:@"mediaStreamRemoveTrack: Stream is nil"
@@ -664,11 +680,13 @@ - (void)handleMethodCall:(FlutterMethodCall *)call
}
}
-- (void)dealloc {
+- (void) dealloc {
[_localTracks removeAllObjects];
_localTracks = nil;
[_localStreams removeAllObjects];
_localStreams = nil;
+ [_localSenders removeAllObjects];
+ _localSenders = nil;
for (NSString *peerConnectionId in _peerConnections) {
RTCPeerConnection *peerConnection = _peerConnections[peerConnectionId];
diff --git a/lib/media_stream.dart b/lib/media_stream.dart
index cb6b28a408..e4206c7689 100644
--- a/lib/media_stream.dart
+++ b/lib/media_stream.dart
@@ -91,12 +91,12 @@ class MediaStream {
/// private: method.
void _setMediaTracks(List audioTracks, List videoTracks) {
List newAudioTracks = new List();
- audioTracks.forEach((trackInfo) {
+ (audioTracks ?? []).forEach((trackInfo) {
newAudioTracks.add(MediaStreamTrack.fromMap(trackInfo));
});
_audioTracks = newAudioTracks;
List newVideoTracks = new List();
- videoTracks.forEach((trackInfo) {
+ (videoTracks ?? []).forEach((trackInfo) {
newVideoTracks.add(MediaStreamTrack.fromMap(trackInfo));
});
_videoTracks = newVideoTracks;
diff --git a/lib/rtc_peerconnection.dart b/lib/rtc_peerconnection.dart
index b9710a8d30..1f67c960ec 100644
--- a/lib/rtc_peerconnection.dart
+++ b/lib/rtc_peerconnection.dart
@@ -408,6 +408,23 @@ class RTCPeerConnection {
}
}
+  Future<bool> closeSender(RTCRtpSender sender) async {
+    try {
+      final Map<dynamic, dynamic> response = await _channel.invokeMethod(
+          'closeSender', <String, dynamic>{
+        'peerConnectionId': this._peerConnectionId,
+        'senderId': sender.senderId
+      });
+ bool result = response["result"];
+ _senders.removeWhere((item) {
+ return sender.senderId == item.senderId;
+ });
+ return result;
+ } on PlatformException catch (e) {
+      throw 'Unable to RTCPeerConnection::closeSender: ${e.message}';
+ }
+ }
+
Future addTransceiver(MediaStreamTrack track,
[RTCRtpTransceiverInit init]) async {
try {