diff --git a/common/darwin/Classes/FlutterRTCAudioSink-Interface.h b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h
new file mode 100644
index 0000000000..8a0352333d
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h
@@ -0,0 +1,6 @@
+void RTCAudioSinkCallback (void *object,
+                           const void *audio_data,
+                           int bits_per_sample,
+                           int sample_rate,
+                           size_t number_of_channels,
+                           size_t number_of_frames);
diff --git a/common/darwin/Classes/FlutterRTCAudioSink.h b/common/darwin/Classes/FlutterRTCAudioSink.h
new file mode 100644
index 0000000000..34cf46669c
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCAudioSink.h
@@ -0,0 +1,14 @@
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CoreMedia.h>
+#import <WebRTC/WebRTC.h>
+
+@interface FlutterRTCAudioSink : NSObject
+
+@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
+@property (nonatomic) CMAudioFormatDescriptionRef format;
+
+- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio;
+
+- (void) close;
+
+@end
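
Reviewer note: the header above is the sink's entire public surface. A minimal consumer would be wired up roughly like this (illustrative sketch, not part of the patch; `track` stands for any live RTCAudioTrack and the logging is a placeholder):

    FlutterRTCAudioSink *sink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:track];
    sink.bufferCallback = ^(CMSampleBufferRef buffer) {
        // One CMSampleBuffer of interleaved PCM per WebRTC audio callback;
        // sink.format describes the layout of the most recent buffer.
        NSLog(@"got %ld samples", (long)CMSampleBufferGetNumSamples(buffer));
    };
    // ... when done, detach from the native source before releasing the sink:
    [sink close];
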
diff --git a/common/darwin/Classes/FlutterRTCAudioSink.mm b/common/darwin/Classes/FlutterRTCAudioSink.mm
new file mode 100644
index 0000000000..4fb575b398
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCAudioSink.mm
@@ -0,0 +1,67 @@
+#import <AVFoundation/AVFoundation.h>
+#import "FlutterRTCAudioSink.h"
+#import "RTCAudioSource+Private.h"
+#include "media_stream_interface.h"
+#include "audio_sink_bridge.cpp"
+
+@implementation FlutterRTCAudioSink {
+    AudioSinkBridge *_bridge;
+    webrtc::AudioSourceInterface* _audioSource;
+}
+
+- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio {
+    self = [super init];
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSourcePtr = audio.source.nativeAudioSource;
+    _audioSource = audioSourcePtr.get();
+    _bridge = new AudioSinkBridge((void*)CFBridgingRetain(self));
+    _audioSource->AddSink(_bridge);
+    return self;
+}
+
+- (void) close {
+    _audioSource->RemoveSink(_bridge);
+    delete _bridge;
+    _bridge = nil;
+    _audioSource = nil;
+}
+
+void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames)
+{
+    AudioBufferList audioBufferList;
+    AudioBuffer audioBuffer;
+    audioBuffer.mData = (void*) audio_data;
+    audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
+    audioBuffer.mNumberChannels = number_of_channels;
+    audioBufferList.mNumberBuffers = 1;
+    audioBufferList.mBuffers[0] = audioBuffer;
+    AudioStreamBasicDescription audioDescription;
+    audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
+    audioDescription.mBitsPerChannel = bits_per_sample;
+    audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
+    audioDescription.mChannelsPerFrame = number_of_channels;
+    audioDescription.mFormatID = kAudioFormatLinearPCM;
+    audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
+    audioDescription.mFramesPerPacket = 1;
+    audioDescription.mReserved = 0;
+    audioDescription.mSampleRate = sample_rate;
+    CMAudioFormatDescriptionRef formatDesc;
+    CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc);
+    CMSampleBufferRef buffer;
+    CMSampleTimingInfo timing;
+    timing.decodeTimeStamp = kCMTimeInvalid;
+    timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
+    timing.duration = CMTimeMake(1, sample_rate);
+    CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
+    CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
+    @autoreleasepool {
+        FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
+        sink.format = formatDesc;
+        if (sink.bufferCallback != nil) {
+            sink.bufferCallback(buffer);
+        } else {
+            NSLog(@"Buffer callback is nil");
+        }
+    }
+}
+
+@end
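
Reviewer note on lifetimes in RTCAudioSinkCallback above: `formatDesc` and `buffer` come from CoreMedia `Create` calls and are never released, so every audio callback leaks both objects, and the `CFBridgingRetain` in `-initWithAudioTrack:` has no matching `CFBridgingRelease`. The sample buffer, at least, could be released at the end of the callback, since `AVAssetWriterInput` retains what it needs during an append. A hedged sketch of that fix (not in the patch; releasing `formatDesc` is trickier because the assign-style `sink.format` property would be left dangling):

    if (sink.bufferCallback != nil) {
        sink.bufferCallback(buffer);
    } else {
        NSLog(@"Buffer callback is nil");
    }
    CFRelease(buffer);  // would balance CMSampleBufferCreate above
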
diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h
index 302e6fd71f..1708562c49 100644
--- a/common/darwin/Classes/FlutterRTCFrameCapturer.h
+++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h
@@ -9,4 +9,6 @@
 - (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result;
 
++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame;
+
 @end
diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m
index 007f6dc0c8..88809c2897 100644
--- a/common/darwin/Classes/FlutterRTCFrameCapturer.m
+++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m
@@ -42,7 +42,7 @@ - (void)renderFrame:(nullable RTCVideoFrame *)frame
     CVPixelBufferRef pixelBufferRef;
     bool shouldRelease;
     if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
-        pixelBufferRef = [self convertToCVPixelBuffer:frame];
+        pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
         shouldRelease = true;
     } else {
         pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer;
@@ -113,7 +113,7 @@ - (void)renderFrame:(nullable RTCVideoFrame *)frame
     });
 }
 
--(CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame
++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame
 {
     id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
     CVPixelBufferRef outputPixelBuffer;
diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.h b/common/darwin/Classes/FlutterRTCMediaRecorder.h
new file mode 100644
index 0000000000..3b6c65ba6a
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCMediaRecorder.h
@@ -0,0 +1,27 @@
+#if TARGET_OS_IPHONE
+#import <Flutter/Flutter.h>
+#elif TARGET_OS_OSX
+#import <FlutterMacOS/FlutterMacOS.h>
+#endif
+#import <WebRTC/WebRTC.h>
+
+@import Foundation;
+@import AVFoundation;
+
+@interface FlutterRTCMediaRecorder : NSObject<RTCVideoRenderer>
+
+@property (nonatomic, strong) RTCVideoTrack * _Nullable videoTrack;
+@property (nonatomic, strong) NSURL * _Nonnull output;
+@property (nonatomic, strong) AVAssetWriter * _Nullable assetWriter;
+@property (nonatomic, strong) AVAssetWriterInput * _Nullable writerInput;
+
+- (instancetype _Nonnull) initWithVideoTrack:(RTCVideoTrack * _Nullable)video
+                             rotationDegrees:(NSNumber * _Nonnull)rotation
+                                  audioTrack:(RTCAudioTrack * _Nullable)audio
+                                  outputFile:(NSURL * _Nonnull)out;
+
+- (void) changeVideoTrack:(RTCVideoTrack * _Nonnull) track;
+
+- (void) stop:(_Nonnull FlutterResult) result;
+
+@end
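
For context before the implementation: this interface is the recorder's whole native API. A typical call site would look roughly like the following (sketch; `videoTrack` and `audioTrack` are illustrative locals, and `FlutterResult` is Flutter's standard result block type):

    NSURL *out = [NSURL fileURLWithPath:@"/tmp/recording.mp4"];
    FlutterRTCMediaRecorder *recorder =
        [[FlutterRTCMediaRecorder alloc] initWithVideoTrack:videoTrack
                                            rotationDegrees:@0
                                                 audioTrack:audioTrack
                                                 outputFile:out];
    // ... record for a while, then finalize the file:
    [recorder stop:^(id _Nullable result) {
        NSLog(@"recording finished: %@", result);
    }];
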
diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.m b/common/darwin/Classes/FlutterRTCMediaRecorder.m
new file mode 100644
index 0000000000..a28775b7c2
--- /dev/null
+++ b/common/darwin/Classes/FlutterRTCMediaRecorder.m
@@ -0,0 +1,191 @@
+#import <Foundation/Foundation.h>
+#import "FlutterRTCMediaRecorder.h"
+#import "FlutterRTCAudioSink.h"
+#import "FlutterRTCFrameCapturer.h"
+
+@import AVFoundation;
+
+@implementation FlutterRTCMediaRecorder {
+    int framesCount;
+    bool isInitialized;
+    CGSize _renderSize;
+    RTCVideoRotation _rotation;
+    FlutterRTCAudioSink* _audioSink;
+    AVAssetWriterInput* _audioWriter;
+    int _additionalRotation;
+    int64_t _startTime;
+}
+
+- (instancetype)initWithVideoTrack:(RTCVideoTrack *)video
+                   rotationDegrees:(NSNumber *)rotation
+                        audioTrack:(RTCAudioTrack *)audio
+                        outputFile:(NSURL *)out {
+    self = [super init];
+    _rotation = -1;
+    isInitialized = false;
+    self.videoTrack = video;
+    self.output = out;
+    _additionalRotation = rotation.intValue;
+    [video addRenderer:self];
+    framesCount = 0;
+    if (audio != nil)
+        _audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio];
+    else
+        NSLog(@"Audio track is nil");
+    _startTime = -1;
+    return self;
+}
+
+- (void)changeVideoTrack:(RTCVideoTrack *)track {
+    if (self.videoTrack) {
+        [self.videoTrack removeRenderer:self];
+    }
+    self.videoTrack = track;
+    [track addRenderer:self];
+}
+
+- (void)initialize:(CGSize)size {
+    _renderSize = size;
+    NSDictionary *videoSettings = @{
+        AVVideoCompressionPropertiesKey: @{AVVideoAverageBitRateKey: @(6*1024*1024)},
+        AVVideoCodecKey: AVVideoCodecTypeH264,
+        AVVideoHeightKey: @(size.height),
+        AVVideoWidthKey: @(size.width),
+    };
+    self.writerInput = [[AVAssetWriterInput alloc]
+        initWithMediaType:AVMediaTypeVideo
+           outputSettings:videoSettings];
+    self.writerInput.expectsMediaDataInRealTime = true;
+    self.writerInput.mediaTimeScale = 30;
+    int rotationDegrees = _additionalRotation;
+    switch (_rotation) {
+        case RTCVideoRotation_0: break;
+        case RTCVideoRotation_90: rotationDegrees += 90; break;
+        case RTCVideoRotation_180: rotationDegrees += 180; break;
+        case RTCVideoRotation_270: rotationDegrees += 270; break;
+        default: break;
+    }
+    rotationDegrees %= 360;
+    self.writerInput.transform = CGAffineTransformMakeRotation(M_PI * rotationDegrees / 180);
+
+    if (_audioSink != nil) {
+        AudioChannelLayout acl;
+        bzero(&acl, sizeof(acl));
+        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+        NSDictionary* audioSettings = @{
+            AVFormatIDKey: [NSNumber numberWithInt: kAudioFormatMPEG4AAC],
+            AVNumberOfChannelsKey: @1,
+            AVSampleRateKey: @44100.0,
+            AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)],
+            AVEncoderBitRateKey: @64000,
+        };
+        _audioWriter = [[AVAssetWriterInput alloc]
+            initWithMediaType:AVMediaTypeAudio
+               outputSettings:audioSettings
+             sourceFormatHint:_audioSink.format];
+        _audioWriter.expectsMediaDataInRealTime = true;
+    }
+
+    NSError *error;
+    self.assetWriter = [[AVAssetWriter alloc]
+        initWithURL:self.output
+           fileType:AVFileTypeMPEG4
+              error:&error];
+    if (error != nil)
+        NSLog(@"%@",[error localizedDescription]);
+    self.assetWriter.shouldOptimizeForNetworkUse = true;
+    [self.assetWriter addInput:self.writerInput];
+    if (_audioWriter != nil) {
+        [self.assetWriter addInput:_audioWriter];
+        _audioSink.bufferCallback = ^(CMSampleBufferRef buffer){
+            if (self->_audioWriter.readyForMoreMediaData) {
+                if ([self->_audioWriter appendSampleBuffer:buffer])
+                    NSLog(@"Audio frame appended");
+                else
+                    NSLog(@"Audio frame not appended %@", self.assetWriter.error);
+            }
+        };
+    }
+    [self.assetWriter startWriting];
+    [self.assetWriter startSessionAtSourceTime:kCMTimeZero];
+
+    isInitialized = true;
+}
+
+- (void)setSize:(CGSize)size {
+}
+
+- (void)renderFrame:(nullable RTCVideoFrame *)frame {
+    if (frame == nil) {
+        return;
+    }
+    if (!isInitialized) {
+        _rotation = frame.rotation;
+        [self initialize:CGSizeMake((CGFloat) frame.width, (CGFloat) frame.height)];
+    }
+    if (!self.writerInput.readyForMoreMediaData) {
+        NSLog(@"Drop frame, not ready");
+        return;
+    }
+    id<RTCVideoFrameBuffer> buffer = frame.buffer;
+    CVPixelBufferRef pixelBufferRef;
+    BOOL shouldRelease = false;
+    if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+        pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer;
+    } else {
+        pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
+        shouldRelease = true;
+    }
+    CMVideoFormatDescriptionRef formatDescription;
+    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBufferRef, &formatDescription);
+
+    CMSampleTimingInfo timingInfo;
+
+    timingInfo.decodeTimeStamp = kCMTimeInvalid;
+    if (_startTime == -1) {
+        _startTime = frame.timeStampNs / 1000;
+    }
+    int64_t frameTime = (frame.timeStampNs / 1000) - _startTime;
+    timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000);
+    framesCount++;
+
+    CMSampleBufferRef outBuffer;
+
+    status = CMSampleBufferCreateReadyWithImageBuffer(
+        kCFAllocatorDefault,
+        pixelBufferRef,
+        formatDescription,
+        &timingInfo,
+        &outBuffer
+    );
+
+    if (![self.writerInput appendSampleBuffer:outBuffer]) {
+        NSLog(@"Frame not appended %@", self.assetWriter.error);
+    }
+#if TARGET_OS_IPHONE
+    if (shouldRelease) {
+        CVPixelBufferRelease(pixelBufferRef);
+    }
+#endif
+}
+
+- (void)stop:(FlutterResult _Nonnull) result {
+    if (_audioSink != nil) {
+        _audioSink.bufferCallback = nil;
+        [_audioSink close];
+    }
+    [self.videoTrack removeRenderer:self];
+    [self.writerInput markAsFinished];
+    [_audioWriter markAsFinished];
+    dispatch_async(dispatch_get_main_queue(), ^{
+        [self.assetWriter finishWritingWithCompletionHandler:^{
+            NSError* error = self.assetWriter.error;
+            if (error == nil) {
+                result(nil);
+            } else {
+                result([FlutterError errorWithCode:@"Failed to save recording"
                                           message:[error localizedDescription]
                                           details:nil]);
+            }
+        }];
+    });
+}
+
+@end
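
A note on the timestamp mapping in -renderFrame: above, restated as a sketch (variable names illustrative): WebRTC stamps frames in nanoseconds on an arbitrary clock, while the writer session starts at kCMTimeZero, so the first frame's timestamp becomes the origin and each presentation timestamp is expressed in microseconds on a 1 MHz timescale:

    int64_t originUs = firstFrame.timeStampNs / 1000;       // ns -> µs, session origin
    int64_t ptsUs    = frame.timeStampNs / 1000 - originUs; // elapsed µs since origin
    CMTime  pts      = CMTimeMake(ptsUs, 1000000);          // seconds = ptsUs / 1e6
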
diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h
index 697fb08929..d9ad7747b0 100644
--- a/common/darwin/Classes/FlutterWebRTCPlugin.h
+++ b/common/darwin/Classes/FlutterWebRTCPlugin.h
@@ -9,6 +9,7 @@
 @class FlutterRTCVideoRenderer;
 @class FlutterRTCFrameCapturer;
+@class FlutterRTCMediaRecorder;
 
 typedef void (^CompletionHandler)(void);
 
@@ -25,6 +26,7 @@ typedef void (^CapturerStopHandler)(CompletionHandler handler);
 @property (nonatomic, strong) NSMutableDictionary *localStreams;
 @property (nonatomic, strong) NSMutableDictionary *localTracks;
 @property (nonatomic, strong) NSMutableDictionary *renders;
+@property (nonatomic, strong) NSMutableDictionary *recorders;
 @property (nonatomic, strong) NSMutableDictionary *videoCapturerStopHandlers;
 
 #if TARGET_OS_IPHONE
diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m
index 399a1df1dd..57916a3a6a 100644
--- a/common/darwin/Classes/FlutterWebRTCPlugin.m
+++ b/common/darwin/Classes/FlutterWebRTCPlugin.m
@@ -4,6 +4,7 @@
 #import "FlutterRTCDataChannel.h"
 #import "FlutterRTCDesktopCapturer.h"
 #import "FlutterRTCVideoRenderer.h"
+#import "FlutterRTCMediaRecorder.h"
 #import "AudioUtils.h"
 
 #import <AVFoundation/AVFoundation.h>
@@ -89,6 +90,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel *)channel
     self.localTracks = [NSMutableDictionary new];
     self.renders = [NSMutableDictionary new];
     self.videoCapturerStopHandlers = [NSMutableDictionary new];
+    self.recorders = [NSMutableDictionary new];
 #if TARGET_OS_IPHONE
     AVAudioSession *session = [AVAudioSession sharedInstance];
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) name:AVAudioSessionRouteChangeNotification object:session];
@@ -1049,6 +1051,59 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
     } else if ([@"getDesktopSourceThumbnail" isEqualToString:call.method]){
         NSDictionary* argsMap = call.arguments;
         [self getDesktopSourceThumbnail:argsMap result:result];
+    } else if ([@"startRecordToFile" isEqualToString:call.method]){
+        NSDictionary* argsMap = call.arguments;
+        NSNumber* recorderId = argsMap[@"recorderId"];
+        NSString* path = argsMap[@"path"];
+        NSString* trackId = argsMap[@"videoTrackId"];
+        NSString* audioTrackId = argsMap[@"audioTrackId"];
+        NSNumber* rotation = argsMap[@"rotation"];
+        RTCMediaStreamTrack *track = [self trackForId:trackId];
+        RTCMediaStreamTrack *audioTrack = [self trackForId:audioTrackId];
+        if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) {
+            NSURL* pathUrl = [NSURL fileURLWithPath:path];
+            self.recorders[recorderId] = [[FlutterRTCMediaRecorder alloc]
+                initWithVideoTrack:(RTCVideoTrack *)track
+                   rotationDegrees:rotation
+                        audioTrack:(RTCAudioTrack *)audioTrack
+                        outputFile:pathUrl
+            ];
+        }
+        result(nil);
+    } else if ([@"changeRecorderTrack" isEqualToString:call.method]) {
+        NSDictionary* argsMap = call.arguments;
+        NSNumber* recorderId = argsMap[@"recorderId"];
+        NSString* trackId = argsMap[@"videoTrackId"];
+        RTCMediaStreamTrack *track = [self trackForId:trackId];
+        FlutterRTCMediaRecorder* recorder = self.recorders[recorderId];
+        if (track == nil) {
+            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ failed",call.method]
+                                       message:[NSString stringWithFormat:@"Error: track with id %@ not found!",trackId]
+                                       details:nil]);
+        } else if (recorder == nil) {
+            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ failed",call.method]
+                                       message:[NSString stringWithFormat:@"Error: recorder with id %@ not found!",recorderId]
+                                       details:nil]);
+        } else if (recorder.videoTrack == nil) {
+            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ failed",call.method]
+                                       message:[NSString stringWithFormat:@"Error: recorder with id %@ doesn't have video track!",recorderId]
+                                       details:nil]);
+        } else {
+            [recorder changeVideoTrack:(RTCVideoTrack *)track];
+            result(nil);
+        }
+    } else if ([@"stopRecordToFile" isEqualToString:call.method]) {
+        NSDictionary* argsMap = call.arguments;
+        NSNumber* recorderId = argsMap[@"recorderId"];
+        FlutterRTCMediaRecorder* recorder = self.recorders[recorderId];
+        if (recorder != nil) {
+            [recorder stop:result];
+            [self.recorders removeObjectForKey:recorderId];
+        } else {
+            result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ failed",call.method]
+                                       message:[NSString stringWithFormat:@"Error: recorder with id %@ not found!",recorderId]
+                                       details:nil]);
+        }
     } else {
         result(FlutterMethodNotImplemented);
     }
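
For reference, the arguments unpacked by the startRecordToFile branch above are assembled on the Dart side (see lib/src/native/media_recorder_impl.dart at the end of this diff). An equivalent payload, with illustrative values, would be:

    NSDictionary *args = @{
        @"recorderId":   @1234,              // random id chosen by the Dart side
        @"path":         @"/tmp/test.mp4",   // wrapped with fileURLWithPath: above
        @"videoTrackId": @"video-track-id",  // resolved via trackForId:
        @"audioTrackId": @"audio-track-id",  // optional
        @"rotation":     @90,                // extra rotation in degrees
    };
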
diff --git a/common/darwin/Classes/RTCAudioSource+Private.h b/common/darwin/Classes/RTCAudioSource+Private.h
new file mode 100644
index 0000000000..6e45d12fbf
--- /dev/null
+++ b/common/darwin/Classes/RTCAudioSource+Private.h
@@ -0,0 +1,14 @@
+#ifdef __cplusplus
+#import "WebRTC/RTCAudioSource.h"
+#include "media_stream_interface.h"
+
+@interface RTCAudioSource ()
+
+/**
+ * The AudioSourceInterface object passed to this RTCAudioSource during
+ * construction.
+ */
+@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
+
+@end
+#endif
diff --git a/common/darwin/Classes/audio_sink_bridge.cpp b/common/darwin/Classes/audio_sink_bridge.cpp
new file mode 100644
index 0000000000..16ce8fa841
--- /dev/null
+++ b/common/darwin/Classes/audio_sink_bridge.cpp
@@ -0,0 +1,27 @@
+#include "media_stream_interface.h"
+#include "FlutterRTCAudioSink-Interface.h"
+
+class AudioSinkBridge : public webrtc::AudioTrackSinkInterface {
+private:
+    void* sink;
+
+public:
+    AudioSinkBridge(void* sink1) {
+        sink = sink1;
+    }
+    void OnData(const void* audio_data,
+                int bits_per_sample,
+                int sample_rate,
+                size_t number_of_channels,
+                size_t number_of_frames) override
+    {
+        RTCAudioSinkCallback(sink,
+            audio_data,
+            bits_per_sample,
+            sample_rate,
+            number_of_channels,
+            number_of_frames
+        );
+    };
+    int NumPreferredChannels() const override { return 1; }
+};
diff --git a/common/darwin/Classes/media_stream_interface.h b/common/darwin/Classes/media_stream_interface.h
new file mode 100644
index 0000000000..e25553f9fa
--- /dev/null
+++ b/common/darwin/Classes/media_stream_interface.h
@@ -0,0 +1,199 @@
+/*
+ *  Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/// Source https://webrtc.googlesource.com/src/+/master/api/media_stream_interface.h
+
+#ifdef __cplusplus
+#ifndef API_MEDIA_STREAM_INTERFACE_H_
+#define API_MEDIA_STREAM_INTERFACE_H_
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+namespace webrtc {
+
+  // Generic observer interface.
+  class ObserverInterface {
+   public:
+    virtual void OnChanged() = 0;
+   protected:
+    virtual ~ObserverInterface() {}
+  };
+
+  class NotifierInterface {
+   public:
+    virtual void RegisterObserver(ObserverInterface* observer) = 0;
+    virtual void UnregisterObserver(ObserverInterface* observer) = 0;
+    virtual ~NotifierInterface() {}
+  };
+
+  enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained };
+
+  // Interfaces where refcounting is part of the public api should
+  // inherit this abstract interface. The implementation of these
+  // methods is usually provided by the RefCountedObject template class,
+  // applied as a leaf in the inheritance tree.
+  class RefCountInterface {
+   public:
+    virtual void AddRef() const = 0;
+    virtual RefCountReleaseStatus Release() const = 0;
+    // Non-public destructor, because Release() has exclusive responsibility for
+    // destroying the object.
+   protected:
+    virtual ~RefCountInterface() {}
+  };
+
+  // Base class for sources. A MediaStreamTrack has an underlying source that
+  // provides media. A source can be shared by multiple tracks.
+  class MediaSourceInterface : public RefCountInterface,
+                               public NotifierInterface {
+   public:
+    enum SourceState { kInitializing, kLive, kEnded, kMuted };
+    virtual SourceState state() const = 0;
+    virtual bool remote() const = 0;
+   protected:
+    ~MediaSourceInterface() override = default;
+  };
+
+  // Interface for receiving audio data from a AudioTrack.
+  class AudioTrackSinkInterface {
+   public:
+    virtual void OnData(const void* audio_data,
+                        int bits_per_sample,
+                        int sample_rate,
+                        size_t number_of_channels,
+                        size_t number_of_frames) {
+    }
+    virtual void OnData(const void* audio_data,
+                        int bits_per_sample,
+                        int sample_rate,
+                        size_t number_of_channels,
+                        size_t number_of_frames,
+                        void* absolute_capture_timestamp_ms) {
+      // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one
+      // pure virtual.
+      return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+                    number_of_frames);
+    }
+    virtual int NumPreferredChannels() const { return -1; }
+   protected:
+    virtual ~AudioTrackSinkInterface() {}
+  };
+
+  // AudioSourceInterface is a reference counted source used for AudioTracks.
+  // The same source can be used by multiple AudioTracks.
+  class AudioSourceInterface : public MediaSourceInterface {
+   public:
+    class AudioObserver {
+     public:
+      virtual void OnSetVolume(double volume) = 0;
+     protected:
+      virtual ~AudioObserver() {}
+    };
+
+    // TODO(deadbeef): Makes all the interfaces pure virtual after they're
+    // implemented in chromium.
+
+    // Sets the volume of the source. |volume| is in the range of [0, 10].
+    // TODO(tommi): This method should be on the track and ideally volume should
+    // be applied in the track in a way that does not affect clones of the track.
+    virtual void SetVolume(double volume) {}
+
+    // Registers/unregisters observers to the audio source.
+    virtual void RegisterAudioObserver(AudioObserver* observer) {}
+    virtual void UnregisterAudioObserver(AudioObserver* observer) {}
+
+    // TODO(tommi): Make pure virtual.
+    virtual void AddSink(AudioTrackSinkInterface* sink) {}
+    virtual void RemoveSink(AudioTrackSinkInterface* sink) {}
+
+    // Returns options for the AudioSource.
+    // (for some of the settings this approach is broken, e.g. setting
+    // audio network adaptation on the source is the wrong layer of abstraction).
+//    virtual const AudioOptions options() const;
+  };
+
+}  // namespace webrtc
+
+namespace rtc {
+
+  template <class T>
+  class scoped_refptr {
+   public:
+    typedef T element_type;
+
+    scoped_refptr() : ptr_(nullptr) {}
+    scoped_refptr(std::nullptr_t) : ptr_(nullptr) {}  // NOLINT(runtime/explicit)
+    explicit scoped_refptr(T* p) : ptr_(p) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    template <typename U>
+    scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    // Move constructors.
+    scoped_refptr(scoped_refptr<T>&& r) noexcept : ptr_(r.release()) {}
+    template <typename U>
+    scoped_refptr(scoped_refptr<U>&& r) noexcept : ptr_(r.release()) {}
+    ~scoped_refptr() {
+      if (ptr_)
+        ptr_->Release();
+    }
+
+    T* get() const { return ptr_; }
+    explicit operator bool() const { return ptr_ != nullptr; }
+    T& operator*() const { return *ptr_; }
+    T* operator->() const { return ptr_; }
+
+    // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a
+    // null pointer, all without touching the reference count of the underlying
+    // pointed-to object. The object is still reference counted, and the caller of
+    // release() is now the proud owner of one reference, so it is responsible for
+    // calling Release() once on the object when no longer using it.
+    T* release() {
+      T* retVal = ptr_;
+      ptr_ = nullptr;
+      return retVal;
+    }
+
+    scoped_refptr<T>& operator=(T* p) {
+      // AddRef first so that self assignment should work
+      if (p)
+        p->AddRef();
+      if (ptr_)
+        ptr_->Release();
+      ptr_ = p;
+      return *this;
+    }
+    scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
+      return *this = r.ptr_;
+    }
+    template <typename U>
+    scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
+      return *this = r.get();
+    }
+    scoped_refptr<T>& operator=(scoped_refptr<T>&& r) noexcept {
+      scoped_refptr<T>(std::move(r)).swap(*this);
+      return *this;
+    }
+    template <typename U>
+    scoped_refptr<T>& operator=(scoped_refptr<U>&& r) noexcept {
+      scoped_refptr<T>(std::move(r)).swap(*this);
+      return *this;
+    }
+
+    void swap(T** pp) noexcept {
+      T* p = ptr_;
+      ptr_ = *pp;
+      *pp = p;
+    }
+    void swap(scoped_refptr<T>& r) noexcept { swap(&r.ptr_); }
+
+   protected:
+    T* ptr_;
+  };
+
+}  // namespace rtc
+
+#endif  // API_MEDIA_STREAM_INTERFACE_H_
+#endif  // __cplusplus
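
This vendored header exists so that FlutterRTCAudioSink.mm can hold the native audio source; the ownership pattern it enables looks like this in Objective-C++ (sketch; `audioTrack` is an illustrative RTCAudioTrack):

    // scoped_refptr add-refs on copy and releases in its destructor;
    // get() borrows the raw pointer without touching the reference count.
    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
        audioTrack.source.nativeAudioSource;          // holds one reference
    webrtc::AudioSourceInterface *raw = source.get(); // borrowed, count unchanged
    // `source` drops its reference when it goes out of scope, so `raw` must
    // not outlive it unless something else keeps the source alive.
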
diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart
index 9cc0296bae..0fb77f81b0 100644
--- a/example/lib/src/get_user_media_sample.dart
+++ b/example/lib/src/get_user_media_sample.dart
@@ -98,15 +98,30 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
   void _startRecording() async {
     if (_localStream == null) throw Exception('Stream is not initialized');
-    if (Platform.isIOS) {
-      print('Recording is not available on iOS');
-      return;
-    }
     // TODO(rostopira): request write storage permission
-    final storagePath = await getExternalStorageDirectory();
-    if (storagePath == null) throw Exception('Can\'t find storagePath');
-
-    final filePath = storagePath.path + '/webrtc_sample/test.mp4';
+    String? filePath;
+    if (Platform.isAndroid) {
+      filePath = (await getExternalStorageDirectory())?.path;
+      if (filePath != null) {
+        filePath += '/Movies/test.mp4';
+      }
+    } else if (Platform.isIOS || Platform.isMacOS) {
+      final tempDir = await getTemporaryDirectory();
+      if (!(await tempDir.exists())) {
+        await tempDir.create(recursive: true);
+      }
+      filePath = tempDir.path + '/test.mp4';
+      print(filePath);
+    } else {
+      throw 'Unsupported platform';
+    }
+    if (filePath == null) {
+      throw Exception('Can\'t find storagePath');
+    }
+    final file = File(filePath);
+    if (await file.exists()) {
+      await file.delete();
+    }
     _mediaRecorder = MediaRecorder();
     setState(() {});
diff --git a/ios/Classes/FlutterRTCAudioSink-Interface.h b/ios/Classes/FlutterRTCAudioSink-Interface.h
new file mode 120000
index 0000000000..940c06d646
--- /dev/null
+++ b/ios/Classes/FlutterRTCAudioSink-Interface.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/FlutterRTCAudioSink-Interface.h
\ No newline at end of file
diff --git a/ios/Classes/FlutterRTCAudioSink.h b/ios/Classes/FlutterRTCAudioSink.h
new file mode 120000
index 0000000000..5242de9e22
--- /dev/null
+++ b/ios/Classes/FlutterRTCAudioSink.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/FlutterRTCAudioSink.h
\ No newline at end of file
diff --git a/ios/Classes/FlutterRTCAudioSink.mm b/ios/Classes/FlutterRTCAudioSink.mm
new file mode 120000
index 0000000000..c15372c4ed
--- /dev/null
+++ b/ios/Classes/FlutterRTCAudioSink.mm
@@ -0,0 +1 @@
+../../common/darwin/Classes/FlutterRTCAudioSink.mm
\ No newline at end of file
diff --git a/ios/Classes/FlutterRTCMediaRecorder.h b/ios/Classes/FlutterRTCMediaRecorder.h
new file mode 120000
index 0000000000..31ca7e3b5f
--- /dev/null
+++ b/ios/Classes/FlutterRTCMediaRecorder.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/FlutterRTCMediaRecorder.h
\ No newline at end of file
diff --git a/ios/Classes/FlutterRTCMediaRecorder.m b/ios/Classes/FlutterRTCMediaRecorder.m
new file mode 120000
index 0000000000..1c2b1bf1a8
--- /dev/null
+++ b/ios/Classes/FlutterRTCMediaRecorder.m
@@ -0,0 +1 @@
+../../common/darwin/Classes/FlutterRTCMediaRecorder.m
\ No newline at end of file
diff --git a/ios/Classes/RTCAudioSource+Private.h b/ios/Classes/RTCAudioSource+Private.h
new file mode 120000
index 0000000000..7ce3b77fd6
--- /dev/null
+++ b/ios/Classes/RTCAudioSource+Private.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/RTCAudioSource+Private.h
\ No newline at end of file
diff --git a/ios/Classes/audio_sink_bridge.cpp b/ios/Classes/audio_sink_bridge.cpp
new file mode 120000
index 0000000000..13215e8454
--- /dev/null
+++ b/ios/Classes/audio_sink_bridge.cpp
@@ -0,0 +1 @@
+../../common/darwin/Classes/audio_sink_bridge.cpp
\ No newline at end of file
diff --git a/ios/Classes/media_stream_interface.h b/ios/Classes/media_stream_interface.h
new file mode 120000
index 0000000000..5810a86316
--- /dev/null
+++ b/ios/Classes/media_stream_interface.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/media_stream_interface.h
\ No newline at end of file
diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec
index 5eb33766d8..0968f7ca30 100644
--- a/ios/flutter_webrtc.podspec
+++ b/ios/flutter_webrtc.podspec
@@ -18,4 +18,9 @@ A new flutter plugin project.
   s.dependency 'WebRTC-SDK', '104.5112.02'
   s.ios.deployment_target = '10.0'
   s.static_framework = true
+  s.pod_target_xcconfig = {
+    'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14',
+    'USER_HEADER_SEARCH_PATHS' => 'Classes/**/*.h'
+  }
+  s.libraries = 'c++'
 end
diff --git a/lib/src/media_recorder.dart b/lib/src/media_recorder.dart
index 7076f4eb5b..507808628f 100644
--- a/lib/src/media_recorder.dart
+++ b/lib/src/media_recorder.dart
@@ -7,13 +7,28 @@ class MediaRecorder extends rtc.MediaRecorder {
   final rtc.MediaRecorder _delegate;
 
   @override
-  Future<void> start(String path,
-          {MediaStreamTrack? videoTrack, RecorderAudioChannel? audioChannel}) =>
-      _delegate.start(path, videoTrack: videoTrack, audioChannel: audioChannel);
+  Future<void> start(
+    String path, {
+    MediaStreamTrack? videoTrack,
+    RecorderAudioChannel? audioChannel,
+    MediaStreamTrack? audioTrack,
+    int rotationDegrees = 0,
+  }) {
+    return _delegate.start(
+      path,
+      videoTrack: videoTrack,
+      audioChannel: audioChannel,
+      audioTrack: audioTrack,
+      rotationDegrees: rotationDegrees,
+    );
+  }
 
   @override
   Future stop() => _delegate.stop();
 
+  @override
+  Future<void> changeVideoTrack(MediaStreamTrack videoTrack) =>
+      _delegate.changeVideoTrack(videoTrack);
+
   @override
   void startWeb(
     MediaStream stream, {
diff --git a/lib/src/native/media_recorder_impl.dart b/lib/src/native/media_recorder_impl.dart
index 072a5c6254..48875060d2 100644
--- a/lib/src/native/media_recorder_impl.dart
+++ b/lib/src/native/media_recorder_impl.dart
@@ -8,21 +8,41 @@ import 'utils.dart';
 class MediaRecorderNative extends MediaRecorder {
   static final _random = Random();
   final _recorderId = _random.nextInt(0x7FFFFFFF);
+  var _isStarted = false;
 
   @override
-  Future<void> start(String path,
-      {MediaStreamTrack? videoTrack, RecorderAudioChannel? audioChannel
-      // TODO(cloudwebrtc): add codec/quality options
-      }) async {
+  Future<void> start(
+    String path, {
+    MediaStreamTrack? videoTrack,
+    RecorderAudioChannel? audioChannel,
+    MediaStreamTrack? audioTrack,
+    int rotationDegrees = 0,
+  }) async {
     if (audioChannel == null && videoTrack == null) {
       throw Exception('Neither audio nor video track were provided');
     }
-
+    if ((WebRTC.platformIsIOS || WebRTC.platformIsMacOS) && audioTrack != null) {
+      print("Warning! Audio recording is experimental on iOS/macOS!");
+    }
     await WebRTC.invokeMethod('startRecordToFile', {
       'path': path,
       if (audioChannel != null) 'audioChannel': audioChannel.index,
       if (videoTrack != null) 'videoTrackId': videoTrack.id,
-      'recorderId': _recorderId
+      if (audioTrack != null) 'audioTrackId': audioTrack.id,
+      'rotation': rotationDegrees,
+      'recorderId': _recorderId,
     });
+    _isStarted = true;
+  }
+
+  @override
+  Future<void> changeVideoTrack(MediaStreamTrack videoTrack) async {
+    if (!_isStarted) {
+      throw "Media recorder not started!";
+    }
+    await WebRTC.invokeMethod('changeRecorderTrack', {
+      'videoTrackId': videoTrack.id,
+      'recorderId': _recorderId,
+    });
   }
 
@@ -35,6 +55,11 @@ class MediaRecorderNative extends MediaRecorder {
   }
 
   @override
-  Future stop() async => await WebRTC.invokeMethod(
+  Future stop() async {
+    if (!_isStarted) {
+      throw "Media recorder not started!";
+    }
+    return await WebRTC.invokeMethod(
       'stopRecordToFile', {'recorderId': _recorderId});
+  }
 }