Media recorder support for iOS/OSX by rostopira · Pull Request #1189 · flutter-webrtc/flutter-webrtc
Open · rostopira wants to merge 1 commit into main
6 changes: 6 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink-Interface.h
@@ -0,0 +1,6 @@
void RTCAudioSinkCallback (void *object,
const void *audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames);
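
Note: this C entry point is invoked from the PR's C++ side. The `audio_sink_bridge.cpp` that `FlutterRTCAudioSink.mm` includes below is not shown in this diff; the following is a minimal sketch of what such a bridge plausibly looks like, assuming it subclasses `webrtc::AudioTrackSinkInterface` (the type `AudioSourceInterface::AddSink` expects) and simply forwards each PCM chunk to the callback above.

    // Hypothetical sketch of audio_sink_bridge.cpp (the real file is part of
    // this PR but not shown in this diff).
    #include "media_stream_interface.h"
    #include "FlutterRTCAudioSink-Interface.h"

    class AudioSinkBridge : public webrtc::AudioTrackSinkInterface {
    public:
        explicit AudioSinkBridge(void* sink) : sink_(sink) {}

        // WebRTC delivers PCM in small fixed-size chunks on its worker thread.
        void OnData(const void* audio_data,
                    int bits_per_sample,
                    int sample_rate,
                    size_t number_of_channels,
                    size_t number_of_frames) override {
            RTCAudioSinkCallback(sink_, audio_data, bits_per_sample, sample_rate,
                                 number_of_channels, number_of_frames);
        }

    private:
        void* sink_;  // CFBridgingRetain'ed FlutterRTCAudioSink*
    };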
14 changes: 14 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink.h
@@ -0,0 +1,14 @@
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <WebRTC/WebRTC.h>

@interface FlutterRTCAudioSink : NSObject

@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
@property (nonatomic) CMAudioFormatDescriptionRef format;

- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio;

- (void) close;

@end
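
A minimal usage sketch for this interface (hypothetical, not part of the PR): attach the sink to an `RTCAudioTrack`, consume sample buffers from `bufferCallback`, and call `close` when done.

    // Hypothetical usage of FlutterRTCAudioSink.
    FlutterRTCAudioSink *sink =
        [[FlutterRTCAudioSink alloc] initWithAudioTrack:audioTrack];
    sink.bufferCallback = ^(CMSampleBufferRef buffer) {
        // Each buffer wraps one PCM chunk; sink.format describes it and can
        // be passed to AVAssetWriterInput as a sourceFormatHint, as the
        // recorder below does.
    };
    // ... when recording stops:
    [sink close];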
67 changes: 67 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink.mm
@@ -0,0 +1,67 @@
#import <AVFoundation/AVFoundation.h>
#import "FlutterRTCAudioSink.h"
#import "RTCAudioSource+Private.h"
#include "media_stream_interface.h"
#include "audio_sink_bridge.cpp"

@implementation FlutterRTCAudioSink {
AudioSinkBridge *_bridge;
webrtc::AudioSourceInterface* _audioSource;
}

- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio {
self = [super init];
rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSourcePtr = audio.source.nativeAudioSource;
_audioSource = audioSourcePtr.get();
_bridge = new AudioSinkBridge((void*)CFBridgingRetain(self));
_audioSource->AddSink(_bridge);
return self;
}

- (void) close {
_audioSource->RemoveSink(_bridge);
delete _bridge;
_bridge = nil;
_audioSource = nil;
}

void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames)
{
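// Wrap WebRTC's raw PCM chunk in an AudioBufferList; the data is referenced, not copied.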
AudioBufferList audioBufferList;
AudioBuffer audioBuffer;
audioBuffer.mData = (void*) audio_data;
audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
audioBuffer.mNumberChannels = number_of_channels;
audioBufferList.mNumberBuffers = 1;
audioBufferList.mBuffers[0] = audioBuffer;
AudioStreamBasicDescription audioDescription;
audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
audioDescription.mBitsPerChannel = bits_per_sample;
audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
audioDescription.mChannelsPerFrame = number_of_channels;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
audioDescription.mFramesPerPacket = 1;
audioDescription.mReserved = 0;
audioDescription.mSampleRate = sample_rate;
CMAudioFormatDescriptionRef formatDesc;
CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc);
CMSampleBufferRef buffer;
CMSampleTimingInfo timing;
timing.decodeTimeStamp = kCMTimeInvalid;
timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
timing.duration = CMTimeMake(1, sample_rate);
CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
@autoreleasepool {
FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
sink.format = formatDesc;
if (sink.bufferCallback != nil) {
sink.bufferCallback(buffer);
} else {
NSLog(@"Buffer callback is nil");
}
}
}

@end
2 changes: 2 additions & 0 deletions common/darwin/Classes/FlutterRTCFrameCapturer.h
@@ -9,4 +9,6 @@

- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result;

+ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame;

@end
4 changes: 2 additions & 2 deletions common/darwin/Classes/FlutterRTCFrameCapturer.m
@@ -42,7 +42,7 @@ - (void)renderFrame:(nullable RTCVideoFrame *)frame
CVPixelBufferRef pixelBufferRef;
bool shouldRelease;
if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
- pixelBufferRef = [self convertToCVPixelBuffer:frame];
+ pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
shouldRelease = true;
} else {
pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer;
@@ -113,7 +113,7 @@ - (void)renderFrame:(nullable RTCVideoFrame *)frame
});
}

- -(CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame
+ + (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame
{
id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
CVPixelBufferRef outputPixelBuffer;
27 changes: 27 additions & 0 deletions common/darwin/Classes/FlutterRTCMediaRecorder.h
@@ -0,0 +1,27 @@
#if TARGET_OS_IPHONE
#import <Flutter/Flutter.h>
#elif TARGET_OS_OSX
#import <FlutterMacOS/FlutterMacOS.h>
#endif
#import <WebRTC/WebRTC.h>

@import Foundation;
@import AVFoundation;

@interface FlutterRTCMediaRecorder : NSObject<RTCVideoRenderer>

@property (nonatomic, strong) RTCVideoTrack * _Nullable videoTrack;
@property (nonatomic, strong) NSURL * _Nonnull output;
@property (nonatomic, strong) AVAssetWriter * _Nullable assetWriter;
@property (nonatomic, strong) AVAssetWriterInput * _Nullable writerInput;

- (instancetype _Nonnull) initWithVideoTrack:(RTCVideoTrack * _Nullable)video
rotationDegrees:(NSNumber * _Nonnull)rotation
audioTrack:(RTCAudioTrack * _Nullable)audio
outputFile:(NSURL * _Nonnull)out;

- (void) changeVideoTrack:(RTCVideoTrack * _Nonnull) track;

- (void) stop:(_Nonnull FlutterResult) result;

@end
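
A usage sketch for this API (hypothetical; the method-channel glue that would drive it is outside this diff). `stop:` takes a `FlutterResult` block, which receives nil on success or a `FlutterError` on failure:

    // Hypothetical usage of FlutterRTCMediaRecorder; the output path is assumed.
    NSURL *out = [NSURL fileURLWithPath:@"/tmp/recording.mp4"];
    FlutterRTCMediaRecorder *recorder =
        [[FlutterRTCMediaRecorder alloc] initWithVideoTrack:videoTrack
                                            rotationDegrees:@0
                                                 audioTrack:audioTrack
                                                 outputFile:out];
    // Recording starts as soon as frames arrive: init attaches the recorder
    // to the video track as an RTCVideoRenderer.
    [recorder stop:^(id _Nullable result) {
        // nil on success, FlutterError on failure (see stop: in the .m below).
    }];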
191 changes: 191 additions & 0 deletions common/darwin/Classes/FlutterRTCMediaRecorder.m
@@ -0,0 +1,191 @@
#import <WebRTC/WebRTC.h>
#import "FlutterRTCMediaRecorder.h"
#import "FlutterRTCAudioSink.h"
#import "FlutterRTCFrameCapturer.h"

@import AVFoundation;

@implementation FlutterRTCMediaRecorder {
int framesCount;
bool isInitialized;
CGSize _renderSize;
RTCVideoRotation _rotation;
FlutterRTCAudioSink* _audioSink;
AVAssetWriterInput* _audioWriter;
int _additionalRotation;
int64_t _startTime;
}

- (instancetype)initWithVideoTrack:(RTCVideoTrack *)video rotationDegrees:(NSNumber *)rotation audioTrack:(RTCAudioTrack *)audio outputFile:(NSURL *)out {
self = [super init];
_rotation = -1;
isInitialized = false;
self.videoTrack = video;
self.output = out;
_additionalRotation = rotation.intValue;
[video addRenderer:self];
framesCount = 0;
if (audio != nil)
_audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio];
else
NSLog(@"Audio track is nil");
_startTime = -1;
return self;
}

- (void)changeVideoTrack:(RTCVideoTrack *)track {
if (self.videoTrack) {
[self.videoTrack removeRenderer:self];
}
self.videoTrack = track;
[track addRenderer:self];
}

- (void)initialize:(CGSize)size {
_renderSize = size;
NSDictionary *videoSettings = @{
AVVideoCompressionPropertiesKey: @{AVVideoAverageBitRateKey: @(6*1024*1024)},
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoHeightKey: @(size.height),
AVVideoWidthKey: @(size.width),
};
self.writerInput = [[AVAssetWriterInput alloc]
initWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
self.writerInput.expectsMediaDataInRealTime = true;
self.writerInput.mediaTimeScale = 30;
int rotationDegrees = _additionalRotation;
switch (_rotation) {
case RTCVideoRotation_0: break;
case RTCVideoRotation_90: rotationDegrees += 90; break;
case RTCVideoRotation_180: rotationDegrees += 180; break;
case RTCVideoRotation_270: rotationDegrees += 270; break;
default: break;
}
rotationDegrees %= 360;
self.writerInput.transform = CGAffineTransformMakeRotation(M_PI * rotationDegrees / 180);

if (_audioSink != nil) {
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioSettings = @{
AVFormatIDKey: [NSNumber numberWithInt: kAudioFormatMPEG4AAC],
AVNumberOfChannelsKey: @1,
AVSampleRateKey: @44100.0,
AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)],
AVEncoderBitRateKey: @64000,
};
_audioWriter = [[AVAssetWriterInput alloc]
initWithMediaType:AVMediaTypeAudio
outputSettings:audioSettings
sourceFormatHint:_audioSink.format];
_audioWriter.expectsMediaDataInRealTime = true;
}

NSError *error;
self.assetWriter = [[AVAssetWriter alloc]
initWithURL:self.output
fileType:AVFileTypeMPEG4
error:&error];
if (error != nil)
NSLog(@"%@",[error localizedDescription]);
self.assetWriter.shouldOptimizeForNetworkUse = true;
[self.assetWriter addInput:self.writerInput];
if (_audioWriter != nil) {
[self.assetWriter addInput:_audioWriter];
_audioSink.bufferCallback = ^(CMSampleBufferRef buffer){
if (self->_audioWriter.readyForMoreMediaData) {
if ([self->_audioWriter appendSampleBuffer:buffer])
NSLog(@"Audio frame appended");
else
NSLog(@"Audioframe not appended %@", self.assetWriter.error);
}
};
}
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];

isInitialized = true;
}

- (void)setSize:(CGSize)size {
}

- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (frame == nil) {
return;
}
if (!isInitialized) {
_rotation = frame.rotation;
[self initialize:CGSizeMake((CGFloat) frame.width, (CGFloat) frame.height)];
}
if (!self.writerInput.readyForMoreMediaData) {
NSLog(@"Drop frame, not ready");
return;
}
id <RTCVideoFrameBuffer> buffer = frame.buffer;
CVPixelBufferRef pixelBufferRef;
BOOL shouldRelease = false;
if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer;
} else {
pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
shouldRelease = true;
}
CMVideoFormatDescriptionRef formatDescription;
OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBufferRef, &formatDescription);

CMSampleTimingInfo timingInfo;

timingInfo.decodeTimeStamp = kCMTimeInvalid;
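// frame.timeStampNs is in nanoseconds; rebase to the first frame and
// convert to microseconds (CMTime timescale 1,000,000).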
if (_startTime == -1) {
_startTime = frame.timeStampNs / 1000;
}
int64_t frameTime = (frame.timeStampNs / 1000) - _startTime;
timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000);
framesCount++;

CMSampleBufferRef outBuffer;

status = CMSampleBufferCreateReadyWithImageBuffer(
kCFAllocatorDefault,
pixelBufferRef,
formatDescription,
&timingInfo,
&outBuffer
);

if (![self.writerInput appendSampleBuffer:outBuffer]) {
NSLog(@"Frame not appended %@", self.assetWriter.error);
}
#if TARGET_OS_IPHONE
if (shouldRelease) {
CVPixelBufferRelease(pixelBufferRef);
}
#endif
}

- (void)stop:(FlutterResult _Nonnull) result {
if (_audioSink != nil) {
_audioSink.bufferCallback = nil;
[_audioSink close];
}
[self.videoTrack removeRenderer:self];
[self.writerInput markAsFinished];
[_audioWriter markAsFinished];
dispatch_async(dispatch_get_main_queue(), ^{
[self.assetWriter finishWritingWithCompletionHandler:^{
NSError* error = self.assetWriter.error;
if (error == nil) {
result(nil);
} else {
result([FlutterError errorWithCode:@"Failed to save recording"
message:[error localizedDescription]
details:nil]);
}
}];
});
}

@end
2 changes: 2 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.h
@@ -9,6 +9,7 @@

@class FlutterRTCVideoRenderer;
@class FlutterRTCFrameCapturer;
@class FlutterRTCMediaRecorder;

typedef void (^CompletionHandler)(void);

@@ -25,6 +26,7 @@ typedef void (^CapturerStopHandler)(CompletionHandler handler);
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStream *> *localStreams;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStreamTrack *> *localTracks;
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, FlutterRTCVideoRenderer *> *renders;
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, FlutterRTCMediaRecorder *> *recorders;
@property (nonatomic, strong) NSMutableDictionary<NSString *, CapturerStopHandler> *videoCapturerStopHandlers;

#if TARGET_OS_IPHONE
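
The new `recorders` dictionary implies the plugin maps recorder IDs to instances in its method-call handler. A hedged sketch of that wiring follows; the method name, argument keys, and track lookup are assumptions, and only the `recorders` and `localTracks` properties above are part of this diff.

    // Hypothetical handler inside FlutterWebRTCPlugin (names assumed).
    - (void)startRecorder:(NSDictionary *)args result:(FlutterResult)result {
        NSNumber *recorderId = args[@"recorderId"];
        // Tracks looked up from the plugin's existing localTracks map.
        RTCVideoTrack *videoTrack =
            (RTCVideoTrack *)self.localTracks[args[@"videoTrackId"]];
        RTCAudioTrack *audioTrack =
            (RTCAudioTrack *)self.localTracks[args[@"audioTrackId"]]; // may be nil
        NSURL *out = [NSURL fileURLWithPath:args[@"path"]];
        FlutterRTCMediaRecorder *recorder =
            [[FlutterRTCMediaRecorder alloc] initWithVideoTrack:videoTrack
                                                rotationDegrees:args[@"rotationDegrees"] ?: @0
                                                     audioTrack:audioTrack
                                                     outputFile:out];
        self.recorders[recorderId] = recorder;
        result(nil);
    }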