8000 feat: Media Recorder implementation Android and iOS (#1823) · flutter-webrtc/flutter-webrtc@8a86c39 · GitHub < 10000 link rel="icon" class="js-site-favicon" type="image/svg+xml" href="https://github.githubassets.com/favicons/favicon.svg" data-base-href="https://github.githubassets.com/favicons/favicon">
[go: up one dir, main page]

Skip to content

Commit 8a86c39

Browse files
authored
feat: Media Recorder implementation Android and iOS (#1823)
* Revert "Revert "Media Recorder implementation Android and iOS (#1810)" (#1822)" This reverts commit 42eac74. * bump version for dart-webrtc and interface. * update.
1 parent 42eac74 commit 8a86c39

28 files changed

+778
-60
lines changed

android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java

Lines changed: 62 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import android.app.Activity;
55
import android.app.Fragment;
66
import android.app.FragmentTransaction;
7+
import android.content.ContentResolver;
78
import android.content.ContentValues;
89
import android.content.Context;
910
import android.content.Intent;
@@ -13,12 +14,14 @@
1314
import android.media.AudioDeviceInfo;
1415
import android.media.projection.MediaProjection;
1516
import android.media.projection.MediaProjectionManager;
17+
import android.net.Uri;
1618
import android.os.Build;
1719
import android.os.Build.VERSION;
1820
import android.os.Build.VERSION_CODES;
1921
import android.os.Bundle;
2022
import android.os.Handler;
2123
import android.os.Looper;
24+
import android.os.ParcelFileDescriptor;
2225
import android.os.ResultReceiver;
2326
import android.provider.MediaStore;
2427
import android.util.Log;
@@ -69,6 +72,9 @@
6972
import org.webrtc.audio.JavaAudioDeviceModule;
7073

7174
import java.io.File;
75+
import java.io.FileInputStream;
76+
import java.io.FileOutputStream;
77+
import java.io.InputStream;
7278
import java.util.ArrayList;
7379
import java.util.HashMap;
7480
import java.util.List;
@@ -963,22 +969,64 @@ void startRecordingToFile(
963969
mediaRecorders.append(id, mediaRecorder);
964970
}
965971

966-
void stopRecording(Integer id) {
967-
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
968-
if (mediaRecorder != null) {
969-
mediaRecorder.stopRecording();
970-
mediaRecorders.remove(id);
971-
File file = mediaRecorder.getRecordFile();
972-
if (file != null) {
973-
ContentValues values = new ContentValues(3);
974-
values.put(MediaStore.Video.Media.TITLE, file.getName());
975-
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
976-
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
977-
applicationContext
978-
.getContentResolver()
979-
.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
972+
void stopRecording(Integer id, String albumName) {
973+
try {
974+
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
975+
if (mediaRecorder != null) {
976+
mediaRecorder.stopRecording();
977+
mediaRecorders.remove(id);
978+
File file = mediaRecorder.getRecordFile();
979+
Uri collection;
980+
981+
if (file != null) {
982+
ContentValues values = new ContentValues();
983+
values.put(MediaStore.Video.Media.TITLE, file.getName());
984+
values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName());
985+
values.put(MediaStore.Video.Media.ALBUM, albumName);
986+
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
987+
values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000);
988+
values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis());
989+
990+
//Android version above 9 MediaStore uses RELATIVE_PATH
991+
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
992+
values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName);
993+
values.put(MediaStore.Video.Media.IS_PENDING, 1);
994+
995+
collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY);
996+
} else {
997+
//Android version 9 and below MediaStore uses DATA
998+
values.put(MediaStore.Video.Media.DATA, "/storage/emulated/0/Movies/" + albumName + "/" + file.getName());
999+
1000+
collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
1001+
}
1002+
1003+
ContentResolver resolver = applicationContext.getContentResolver();
1004+
Uri uriSavedMedia = resolver.insert(collection, values);
1005+
1006+
assert uriSavedMedia != null;
1007+
ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w");
1008+
assert pfd != null;
1009+
FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor());
1010+
1011+
InputStream in = new FileInputStream(file);
1012+
1013+
byte[] buf = new byte[8192];
1014+
int len;
1015+
1016+
while ((len = in.read(buf)) > 0) {
1017+
out.write(buf, 0, len);
1018+
}
1019+
1020+
out.close();
1021+
in.close();
1022+
pfd.close();
1023+
values.clear();
1024+
}
9801025
}
1026+
} catch(Exception e){
1027+
9811028
}
1029+
9821030
}
9831031

9841032

android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -770,7 +770,8 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
770770
break;
771771
case "stopRecordToFile":
772772
Integer recorderId = call.argument("recorderId");
773-
getUserMediaImpl.stopRecording(recorderId);
773+
String albumName = call.argument("albumName");
774+
getUserMediaImpl.stopRecording(recorderId, albumName);
774775
result.success(null);
775776
break;
776777
case "captureFrame": {
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
/*
 * C bridge callback invoked for every chunk of raw PCM audio delivered by the
 * native WebRTC audio sink (see audio_sink_bridge.cpp and the definition in
 * FlutterRTCAudioSink.m).
 *
 * object             opaque pointer back to the FlutterRTCAudioSink instance
 *                    (obtained via CFBridgingRetain — TODO confirm against
 *                    audio_sink_bridge.cpp)
 * audio_data         interleaved PCM samples
 * bits_per_sample    sample width in bits
 * sample_rate        sampling rate in Hz
 * number_of_channels channel count
 * number_of_frames   frames in this chunk
 */
void RTCAudioSinkCallback (void *object,
                           const void *audio_data,
                           int bits_per_sample,
                           int sample_rate,
                           size_t number_of_channels,
                           size_t number_of_frames);
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
#import <Foundation/Foundation.h>
2+
#import <CoreMedia/CoreMedia.h>
3+
#import <WebRTC/WebRTC.h>
4+
5+
// Consumes raw PCM audio from an RTCAudioTrack's native source and repackages
// each chunk as a CoreMedia sample buffer, for consumption by a recorder.
@interface FlutterRTCAudioSink : NSObject

// Invoked once per delivered audio chunk with a freshly created CMSampleBufferRef.
@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
// Format description of the most recently delivered buffer.
@property (nonatomic) CMAudioFormatDescriptionRef format;

// Attaches a native sink to the given track's underlying audio source.
- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio;

// Detaches the sink from the audio source and frees the native bridge.
- (void) close;

@end
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
#import <AVFoundation/AVFoundation.h>
2+
#import "FlutterRTCAudioSink.h"
3+
#import "RTCAudioSource+Private.h"
4+
#include "media_stream_interface.h"
5+
#include "audio_sink_bridge.cpp"
6+
7+
@implementation FlutterRTCAudioSink {
    AudioSinkBridge *_bridge;                   // C++ sink forwarding PCM into RTCAudioSinkCallback
    webrtc::AudioSourceInterface* _audioSource; // raw (non-owning) pointer into the native track source
}

// Attaches a C++ bridge sink to the track's native audio source.
- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio {
    self = [super init];
    rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSourcePtr = audio.source.nativeAudioSource;
    // NOTE(review): only the raw pointer is kept; assumes the RTCAudioTrack (and
    // thus the source) outlives this sink — confirm with callers.
    _audioSource = audioSourcePtr.get();
    // NOTE(review): CFBridgingRetain bumps self's retain count and no matching
    // CFBridgingRelease is visible in this file — confirm ownership is balanced
    // elsewhere, otherwise the sink instance leaks.
    _bridge = new AudioSinkBridge((void*)CFBridgingRetain(self));
    _audioSource->AddSink(_bridge);
    return self;
}

// Detaches from the source and destroys the bridge. Must not be called twice:
// both pointers are nil'd but _audioSource is dereferenced unconditionally.
- (void) close {
    _audioSource->RemoveSink(_bridge);
    delete _bridge;
    _bridge = nil;
    _audioSource = nil;
}

// Called from the native audio thread for every PCM chunk. Wraps the raw
// interleaved samples in a CMSampleBuffer and hands it to bufferCallback.
void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames)
{
    // Describe the chunk as a single interleaved AudioBuffer.
    AudioBufferList audioBufferList;
    AudioBuffer audioBuffer;
    audioBuffer.mData = (void*) audio_data;
    audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
    audioBuffer.mNumberChannels = number_of_channels;
    audioBufferList.mNumberBuffers = 1;
    audioBufferList.mBuffers[0] = audioBuffer;
    // Linear PCM, signed integer, packed, native endian — matches WebRTC's sink output.
    AudioStreamBasicDescription audioDescription;
    audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
    audioDescription.mBitsPerChannel = bits_per_sample;
    audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
    audioDescription.mChannelsPerFrame = number_of_channels;
    audioDescription.mFormatID = kAudioFormatLinearPCM;
    audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
    audioDescription.mFramesPerPacket = 1;
    audioDescription.mReserved = 0;
    audioDescription.mSampleRate = sample_rate;
    CMAudioFormatDescriptionRef formatDesc;
    CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc);
    CMSampleBufferRef buffer;
    // Timing is relative (PTS 0); the consumer is expected to re-stamp frames.
    // NOTE(review): assumption — confirm against FlutterRTCMediaRecorder.
    CMSampleTimingInfo timing;
    timing.decodeTimeStamp = kCMTimeInvalid;
    timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
    timing.duration = CMTimeMake(1, sample_rate);
    CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
    CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
    // NOTE(review): `formatDesc` and `buffer` are Create-rule CF objects and are
    // never CFRelease'd here; `sink.format` is an assign property, so the prior
    // formatDesc also leaks on each callback. Confirm whether the bufferCallback
    // consumer releases them — otherwise this leaks once per audio chunk.
    @autoreleasepool {
        FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
        sink.format = formatDesc;
        if (sink.bufferCallback != nil) {
            sink.bufferCallback(buffer);
        } else {
            NSLog(@"Buffer callback is nil");
        }
    }
}

@end

common/darwin/Classes/FlutterRTCFrameCapturer.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,6 @@
1212
toPath:(NSString*)path
1313
result:(FlutterResult)result;
1414

15+
+ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame;
16+
1517
@end

common/darwin/Classes/FlutterRTCFrameCapturer.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
4141
CVPixelBufferRef pixelBufferRef;
4242
bool shouldRelease;
4343
if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
44-
pixelBufferRef = [self convertToCVPixelBuffer:frame];
44+
pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
4545
shouldRelease = true;
4646
} else {
4747
pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer;
@@ -108,7 +108,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
108108
});
109109
}
110110

111-
- (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
111+
+ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
112112
id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
113113
CVPixelBufferRef outputPixelBuffer;
114114
size_t w = (size_t)roundf(i420Buffer.width);
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
#if TARGET_OS_IPHONE
2+
#import <Flutter/Flutter.h>
3+
#elif TARGET_OS_OSX
4+
#import <FlutterMacOS/FlutterMacOS.h>
5+
#endif
6+
#import <WebRTC/WebRTC.h>
7+
8+
@import Foundation;
9+
@import AVFoundation;
10+
11+
// Records a WebRTC video track (and optionally an audio track) to a local
// file using AVAssetWriter. Receives frames by acting as an RTCVideoRenderer.
@interface FlutterRTCMediaRecorder : NSObject <RTCVideoRenderer>

// Track whose frames are being recorded; nil once recording is torn down.
@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack;
// Destination file URL for the recording.
@property(nonatomic, strong) NSURL* _Nonnull output;
// Writer and its video input; lifecycle managed by the implementation.
@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter;
@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput;

// Starts recording the given tracks into `out`. Audio may be nil for video-only.
- (instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video
                                 audioTrack:(RTCAudioTrack* _Nullable)audio
                                 outputFile:(NSURL* _Nonnull)out;

// Finalizes the file and reports completion or failure through `result`.
- (void)stop:(_Nonnull FlutterResult)result;

@end

0 commit comments

Comments
 (0)
0