8000 Implemented audio only recording in media recorder for android and ios by yonatann · Pull Request #207 · flutter-webrtc/flutter-webrtc · GitHub
[go: up one dir, main page]

Skip to content

Implemented audio only recording in media recorder for android and ios #207

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 12 commits into from
May 11, 2020
Merged
19 changes: 19 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
.DS_Store
.packages
.vscode/.DS_Store
.dart_tool/
example/pubspec.lock
pubspec.lock
example/ios/Podfile.lock
Expand All @@ -13,3 +14,21 @@ example/android/gradle*
WorkspaceSettings.xcsettings
example/.flutter-plugins
example/android/local.properties
android/.gradle
android/.settings
android/gradle
android/gradlew
android/gradlew.bat
android/local.properties
example/android/.settings
example/.flutter-plugins-dependencies
example/android/.project/
example/android/app/.classpath
example/ios/Flutter/flutter_export_environment.sh
example/ios/Flutter/Generated.xcconfig
example/ios/Pods/.symlinks
example/ios/Pods/Local Podspecs/
example/ios/Runner/GeneratedPluginRegistrant.h
example/ios/Runner/GeneratedPluginRegistrant.m
example/ios/Runner.xcworkspace/xcuserdata/
example/android/.project
6 changes: 6 additions & 0 deletions android/.classpath
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin/default"/>
</classpath>
23 changes: 23 additions & 0 deletions android/.project
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>flutter_webrtc</name>
<comment>Project android_ created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>
Original file line number Diff line number Diff line change
Expand Up @@ -421,8 +421,7 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
}
} else if (call.method.equals("stopRecordToFile")) {
Integer recorderId = call.argument("recorderId");
getUserMediaImpl.stopRecording(recorderId);
result.success(null);
getUserMediaImpl.stopRecording(recorderId, result);
} else if (call.method.equals("captureFrame")) {
String path = call.argument("path");
String videoTrackId = call.argument("trackId");
Expand Down Expand Up @@ -477,6 +476,10 @@ public void onMethodCall(MethodCall call, Result notSafeResult) {
String kind = call.argument("kind");
String streamId = call.argument("streamId");
createSender(peerConnectionId, kind, streamId, result);
} else if (call.method.equals("closeSender")) {
String peerConnectionId = call.argument("peerConnectionId");
String senderId = call.argument("senderId");
stopSender(peerConnectionId, senderId, result);
} else if (call.method.equals("addTrack")) {
String peerConnectionId = call.argument("peerConnectionId");
String trackId = call.argument("trackId");
Expand Down Expand Up @@ -1000,7 +1003,7 @@ private void mediaStreamTrackStop(final String id) {
private void mediaStreamTrackSetEnabled(final String id, final boolean enabled) {
MediaStreamTrack track = getTrackForId(id);
if (track == null) {
Log.d(TAG, "mediaStreamTrackSetEnabled() track is null");
Log.d(TAG, "mediaStreamTrackSetEnabled() track is null " + id);
return;
} else if (track.enabled() == enabled) {
return;
Expand Down Expand Up @@ -1386,8 +1389,21 @@ private void createSender(String peerConnectionId, String kind, String streamId,
}
}

private void addTrack(String peerConnectionId, String trackId, List<String> streamIds, Result result) {
PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId);

/**
 * Disposes the RtpSender identified by {@code senderId} on the given peer
 * connection and completes {@code result}.
 *
 * @param peerConnectionId id of the owning peer connection
 * @param senderId         id of the sender to close
 * @param result           Flutter method-channel result to complete
 */
private void stopSender(String peerConnectionId, String senderId, Result result) {
    PeerConnectionObserver pco
        = mPeerConnectionObservers.get(peerConnectionId);
    if (pco == null || pco.getPeerConnection() == null) {
        // Fixed copy-paste error: the log and error code previously said
        // "removeTrack" even though this is the closeSender/stopSender path.
        Log.d(TAG, "stopSender() peerConnection is null");
        result.error("stopSender", "stopSender() peerConnection is null", null);
    } else {
        pco.closeSender(senderId, result);
    }
}

private void addTrack(String peerConnectionId, String trackId, List<String> streamIds, Result result){
PeerConnectionObserver pco
= mPeerConnectionObservers.get(peerConnectionId);
MediaStreamTrack track = localTracks.get(trackId);
if (track == null) {
result.error("addTrack", "addTrack() track is null", null);
Expand Down
13 changes: 3 additions & 10 deletions android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -585,6 +585,7 @@ private void getUserMedia(
}

String id = track.id();
Log.d(TAG, "MediaStream Track id: " + id);

if (track instanceof AudioTrack) {
mediaStream.addTrack((AudioTrack) track);
Expand Down Expand Up @@ -792,19 +793,11 @@ else if (audioChannel == AudioChannel.OUTPUT) {
mediaRecorders.append(id, mediaRecorder);
}

void stopRecording(Integer id) {
void stopRecording(Integer id, Result result) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
mediaRecorder.stopRecording();
mediaRecorder.stopRecording(result);
mediaRecorders.remove(id);
File file = mediaRecorder.getRecordFile();
if (file != null) {
ContentValues values = new ContentValues(3);
values.put(MediaStore.Video.Media.TITLE, file.getName());
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
applicationContext.getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,8 +92,8 @@ void close() {
remoteTracks.clear();
dataChannels.clear();
transceivers.clear();
senders.clear();;
receivers.clear();;
senders.clear();
receivers.clear();
}

void dispose() {
Expand Down Expand Up @@ -641,6 +641,14 @@ public void createSender(String kind, String streamId, Result result){
result.success(rtpSenderToMap(sender));
}

/**
 * Disposes the sender with the given id and reports success over the
 * method channel.
 *
 * Mirrors removeTrack(): if the sender id is unknown (already closed, or
 * never created) we report an error instead of throwing an NPE on
 * {@code sender.dispose()}. The disposed sender is also removed from the
 * map so a stale handle cannot be reused later.
 *
 * @param senderId id of the RtpSender to dispose
 * @param result   Flutter method-channel result to complete
 */
public void closeSender(String senderId, Result result) {
    RtpSender sender = senders.get(senderId);
    if (sender == null) {
        result.error("closeSender", "closeSender() sender is null", null);
        return;
    }
    sender.dispose();
    senders.remove(senderId);
    Map<String, Object> params = new HashMap<>();
    params.put("result", true);
    result.success(params);
}

public void addTrack(MediaStreamTrack track, List<String> streamIds, Result result){
RtpSender sender = peerConnection.addTrack(track, streamIds);
senders.put(sender.id(),sender);
Expand All @@ -654,8 +662,8 @@ public void removeTrack(String senderId, Result result){
return;
}
boolean res = peerConnection.removeTrack(sender);
ConstraintsMap params = new ConstraintsMap();
params.putBoolean("result", res);
Map<String, Object> params = new HashMap<>();
params.put("result", res);
result.success(params);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
package com.cloudwebrtc.webrtc.record;

import org.webrtc.audio.JavaAudioDeviceModule;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;

import io.flutter.plugin.common.MethodChannel;

import static android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;

/**
 * Encodes WebRTC audio samples to AAC and muxes them into an MP4 file.
 *
 * All encoder/muxer work is posted onto a dedicated HandlerThread, so the
 * mutable encoder state below is thread-confined and needs no locking.
 */
public class AudioFileRenderer implements JavaAudioDeviceModule.SamplesReadyCallback {
    private static final String TAG = "AudioFileRenderer";

    // Cleared on release(); also cleared when the encoder signals end-of-stream.
    private boolean isRunning = true;

    private final HandlerThread audioThread;
    private final Handler audioThreadHandler;
    private MediaCodec audioEncoder;
    private ByteBuffer[] audioInputBuffers;
    private ByteBuffer[] audioOutputBuffers;
    private MediaCodec.BufferInfo audioBufferInfo;
    private MediaMuxer mediaMuxer;
    // Muxer track index; -1 until the encoder reports its output format.
    // (Removed the unused sibling field `trackIndex` from the original.)
    private int audioTrackIndex = -1;
    // Running presentation timestamp in microseconds.
    private long presTime = 0L;
    private volatile boolean muxerStarted = false;

    /**
     * Creates a renderer writing AAC/MP4 to {@code outputFile}.
     *
     * @param outputFile destination .mp4 file
     * @throws IOException if the muxer cannot be created for the given path
     */
    AudioFileRenderer(File outputFile) throws IOException {
        audioThread = new HandlerThread(TAG + "AudioThread");
        audioThread.start();
        audioThreadHandler = new Handler(audioThread.getLooper());

        mediaMuxer = new MediaMuxer(outputFile.getPath(),
            MUXER_OUTPUT_MPEG_4);
    }

    @Override
    public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
        audioThreadHandler.post(() -> {
            if (audioEncoder == null) {
                try {
                    // Lazily configure the encoder from the first batch, since
                    // channel count / sample rate are only known at runtime.
                    audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
                    MediaFormat format = new MediaFormat();
                    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
                    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
                    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
                    format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
                    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
                    audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                    audioEncoder.start();
                    audioInputBuffers = audioEncoder.getInputBuffers();
                    audioOutputBuffers = audioEncoder.getOutputBuffers();
                } catch (IOException exception) {
                    Log.wtf(TAG, exception);
                    // BUGFIX: the original fell through and dereferenced the
                    // still-null encoder below, crashing with an NPE. Without
                    // an encoder we cannot process this batch, so bail out.
                    return;
                }
            }
            int bufferIndex = audioEncoder.dequeueInputBuffer(0);
            if (bufferIndex >= 0) {
                ByteBuffer buffer = audioInputBuffers[bufferIndex];
                buffer.clear();
                byte[] data = audioSamples.getData();
                buffer.put(data);
                audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
                // NOTE(review): assumes 48 kHz, 16-bit mono-byte accounting
                // (1,000,000 us / 48,000 Hz / 2 bytes = 125/12 us per byte) —
                // confirm against the actual capture format.
                presTime += data.length * 125 / 12;
            }
            drainAudio();
        });
    }

    /** Pulls all pending encoded buffers out of the encoder and feeds the muxer. */
    private void drainAudio() {
        if (audioBufferInfo == null)
            audioBufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break;
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                audioOutputBuffers = audioEncoder.getOutputBuffers();
                Log.d(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Delivered once, before the first encoded buffer: register the
                // track with the muxer and start it.
                MediaFormat newFormat = audioEncoder.getOutputFormat();

                Log.d(TAG, "encoder output format changed: " + newFormat);
                audioTrackIndex = mediaMuxer.addTrack(newFormat);
                if (audioTrackIndex != -1 && !muxerStarted) {
                    mediaMuxer.start();
                    muxerStarted = true;
                }
                if (!muxerStarted)
                    break;
            } else if (encoderStatus < 0) {
                // Fixed "fr om" typo in the original log message.
                Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                try {
                    ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
                        break;
                    }
                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(audioBufferInfo.offset);
                    encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
                    if (muxerStarted)
                        mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
                    isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
                    audioEncoder.releaseOutputBuffer(encoderStatus, false);
                    if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        break;
                    }
                } catch (Exception e) {
                    Log.wtf(TAG, e);
                    break;
                }
            }
        }
    }

    /**
     * Release all resources. All already posted frames will be rendered first.
     * Completes {@code result} with {@code null} once teardown has finished on
     * the audio thread.
     */
    void release(MethodChannel.Result result) {
        isRunning = false;
        if (audioThreadHandler != null)
            audioThreadHandler.post(() -> {
                if (audioEncoder != null) {
                    audioEncoder.stop();
                    audioEncoder.release();
                }
                try {
                    mediaMuxer.stop();
                    mediaMuxer.release();
                } catch (Exception e) {
                    // If no samples ever arrived the muxer was never started and
                    // stop() throws IllegalStateException — safe to ignore here.
                }
                audioThread.quit();

                result.success(null);
            });
    }
}
Original file line number Diff line number Diff line change
@@ -1,20 +1,27 @@
package com.cloudwebrtc.webrtc.record;

import android.annotation.SuppressLint;
import android.util.Log;

import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import org.webrtc.audio.JavaAudioDeviceModule.AudioSamples;

import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;

/** JavaAudioDeviceModule allows attaching samples callback only on building
* We don't want to instantiate VideoFileRenderer and codecs at this step
* It's simple dummy class, it does nothing until samples are necessary */
@SuppressWarnings("WeakerAccess")
public class AudioSamplesInterceptor implements SamplesReadyCallback {

public static int id = 0;
private int _id;
private static final String TAG = "AudioSamplesInterceptor";
@SuppressLint("UseSparseArrays")
protected final HashMap<Integer, SamplesReadyCallback> callbacks = new HashMap<>();
protected final ConcurrentHashMap<Integer, SamplesReadyCallback> callbacks = new ConcurrentHashMap<>();

// Assigns a per-instance debug id from the shared static counter.
// NOTE(review): `id++` on a static field is not atomic — fine only if
// interceptors are constructed from a single thread; confirm against callers.
public AudioSamplesInterceptor() {
this._id = id++;
}

@Override
public void onWebRtcAudioRecordSamplesReady(AudioSamples audioSamples) {
Expand All @@ -25,10 +32,12 @@ public void onWebRtcAudioRecordSamplesReady(AudioSamples audioSamples) {

/** Registers {@code callback} under the recorder {@code id} and logs the new fan-out size. */
public void attachCallback(Integer id, SamplesReadyCallback callback) throws Exception {
    callbacks.put(id, callback);
    int active = callbacks.size();
    Log.d(TAG, _id + " Attached callback " + active);
}

/** Unregisters the callback for {@code id} and logs how many remain attached. */
public void detachCallback(Integer id) {
    callbacks.remove(id);
    int remaining = callbacks.size();
    Log.d(TAG, _id + " Detached callback " + remaining);
}

}
Loading
0