diff --git a/CHANGELOG.md b/CHANGELOG.md index d739c5a13e..cfdc505205 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,16 @@ # Changelog +[0.13.2] - 2025-04-29 + +* [iOS/Android] feat: Media Recorder implementation Android and iOS (#1810) +* [Windows] fix: Pickup registrar for plugin by plugin registrar manager (#1752) +* [Linux] fix: add task runner for linux. (#1821) + +[0.13.1+hotfix.1] - 2025-04-07 + +* [Android] fix: Fix `clearAndroidCommunicationDevice` call blocking. + [0.13.1] - 2025-04-03 * [Android] fix: remove setPreferredInputDevice when getUserAduio. (#1808) diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java index a03494a44e..0b0998f384 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java @@ -4,6 +4,7 @@ import android.app.Activity; import android.app.Fragment; import android.app.FragmentTransaction; +import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.Intent; @@ -13,12 +14,14 @@ import android.media.AudioDeviceInfo; import android.media.projection.MediaProjection; import android.media.projection.MediaProjectionManager; +import android.net.Uri; import android.os.Build; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Handler; import android.os.Looper; +import android.os.ParcelFileDescriptor; import android.os.ResultReceiver; import android.provider.MediaStore; import android.util.Log; @@ -69,6 +72,9 @@ import org.webrtc.audio.JavaAudioDeviceModule; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -963,22 +969,64 @@ void startRecordingToFile( 
mediaRecorders.append(id, mediaRecorder); } - void stopRecording(Integer id) { - MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); - if (mediaRecorder != null) { - mediaRecorder.stopRecording(); - mediaRecorders.remove(id); - File file = mediaRecorder.getRecordFile(); - if (file != null) { - ContentValues values = new ContentValues(3); - values.put(MediaStore.Video.Media.TITLE, file.getName()); - values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); - values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath()); - applicationContext - .getContentResolver() - .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values); + void stopRecording(Integer id, String albumName) { + try { + MediaRecorderImpl mediaRecorder = mediaRecorders.get(id); + if (mediaRecorder != null) { + mediaRecorder.stopRecording(); + mediaRecorders.remove(id); + File file = mediaRecorder.getRecordFile(); + Uri collection; + + if (file != null) { + ContentValues values = new ContentValues(); + values.put(MediaStore.Video.Media.TITLE, file.getName()); + values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName()); + values.put(MediaStore.Video.Media.ALBUM, albumName); + values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4"); + values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000); + values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis()); + + //Android version above 9 MediaStore uses RELATIVE_PATH + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName); + values.put(MediaStore.Video.Media.IS_PENDING, 1); + + collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY); + } else { + //Android version 9 and below MediaStore uses DATA + values.put(MediaStore.Video.Media.DATA, "/storage/emulated/0/Movies/" + albumName + "/" + file.getName()); + + collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; + } + + ContentResolver resolver = 
applicationContext.getContentResolver(); + Uri uriSavedMedia = resolver.insert(collection, values); + + assert uriSavedMedia != null; + ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w"); + assert pfd != null; + FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor()); + + InputStream in = new FileInputStream(file); + + byte[] buf = new byte[8192]; + int len; + + while ((len = in.read(buf)) > 0) { + out.write(buf, 0, len); + } + + out.close(); + in.close(); + pfd.close(); + values.clear(); + } } + } catch(Exception e){ + } + } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java index b5c3df69ef..8444c0e66b 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java @@ -696,6 +696,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } case "clearAndroidCommunicationDevice": { AudioSwitchManager.instance.clearCommunicationDevice(); + result.success(null); break; } case "setMicrophoneMute": @@ -769,7 +770,8 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { break; case "stopRecordToFile": Integer recorderId = call.argument("recorderId"); - getUserMediaImpl.stopRecording(recorderId); + String albumName = call.argument("albumName"); + getUserMediaImpl.stopRecording(recorderId, albumName); result.success(null); break; case "captureFrame": { diff --git a/common/darwin/Classes/FlutterRTCAudioSink-Interface.h b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h new file mode 100644 index 0000000000..8a0352333d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1,6 @@ +void RTCAudioSinkCallback (void *object, + const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames); diff --git 
a/common/darwin/Classes/FlutterRTCAudioSink.h b/common/darwin/Classes/FlutterRTCAudioSink.h new file mode 100644 index 0000000000..34cf46669c --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.h @@ -0,0 +1,14 @@ +#import +#import +#import + +@interface FlutterRTCAudioSink : NSObject + +@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef); +@property (nonatomic) CMAudioFormatDescriptionRef format; + +- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio; + +- (void) close; + +@end diff --git a/common/darwin/Classes/FlutterRTCAudioSink.mm b/common/darwin/Classes/FlutterRTCAudioSink.mm new file mode 100644 index 0000000000..4fb575b398 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1,67 @@ +#import +#import "FlutterRTCAudioSink.h" +#import "RTCAudioSource+Private.h" +#include "media_stream_interface.h" +#include "audio_sink_bridge.cpp" + +@implementation FlutterRTCAudioSink { + AudioSinkBridge *_bridge; + webrtc::AudioSourceInterface* _audioSource; +} + +- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio { + self = [super init]; + rtc::scoped_refptr audioSourcePtr = audio.source.nativeAudioSource; + _audioSource = audioSourcePtr.get(); + _bridge = new AudioSinkBridge((void*)CFBridgingRetain(self)); + _audioSource->AddSink(_bridge); + return self; +} + +- (void) close { + _audioSource->RemoveSink(_bridge); + delete _bridge; + _bridge = nil; + _audioSource = nil; +} + +void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames) +{ + AudioBufferList audioBufferList; + AudioBuffer audioBuffer; + audioBuffer.mData = (void*) audio_data; + audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames; + audioBuffer.mNumberChannels = number_of_channels; + audioBufferList.mNumberBuffers = 1; + audioBufferList.mBuffers[0] = audioBuffer; + AudioStreamBasicDescription audioDescription; + 
audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels; + audioDescription.mBitsPerChannel = bits_per_sample; + audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels; + audioDescription.mChannelsPerFrame = number_of_channels; + audioDescription.mFormatID = kAudioFormatLinearPCM; + audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; + audioDescription.mFramesPerPacket = 1; + audioDescription.mReserved = 0; + audioDescription.mSampleRate = sample_rate; + CMAudioFormatDescriptionRef formatDesc; + CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc); + CMSampleBufferRef buffer; + CMSampleTimingInfo timing; + timing.decodeTimeStamp = kCMTimeInvalid; + timing.presentationTimeStamp = CMTimeMake(0, sample_rate); + timing.duration = CMTimeMake(1, sample_rate); + CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer); + CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList); + @autoreleasepool { + FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object); + sink.format = formatDesc; + if (sink.bufferCallback != nil) { + sink.bufferCallback(buffer); + } else { + NSLog(@"Buffer callback is nil"); + } + } +} + +@end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.h b/common/darwin/Classes/FlutterRTCFrameCapturer.h index ba4c801d92..7cc0ff28c2 100644 --- a/common/darwin/Classes/FlutterRTCFrameCapturer.h +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.h @@ -12,4 +12,6 @@ toPath:(NSString*)path result:(FlutterResult)result; ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame; + @end diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m index baf12de1e0..fe748b1223 100644 
--- a/common/darwin/Classes/FlutterRTCFrameCapturer.m +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -41,7 +41,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame { CVPixelBufferRef pixelBufferRef; bool shouldRelease; if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) { - pixelBufferRef = [self convertToCVPixelBuffer:frame]; + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; shouldRelease = true; } else { pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer; @@ -108,7 +108,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame { }); } -- (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { id i420Buffer = [frame.buffer toI420]; CVPixelBufferRef outputPixelBuffer; size_t w = (size_t)roundf(i420Buffer.width); diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.h b/common/darwin/Classes/FlutterRTCMediaRecorder.h new file mode 100644 index 0000000000..eac82e8b4d --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1,24 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif +#import + +@import Foundation; +@import AVFoundation; + +@interface FlutterRTCMediaRecorder : NSObject + +@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; +@property(nonatomic, strong) NSURL* _Nonnull output; +@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter; +@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput; + +- (instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video + audioTrack:(RTCAudioTrack* _Nullable)audio + outputFile:(NSURL* _Nonnull)out; + +- (void)stop:(_Nonnull FlutterResult)result; + +@end diff --git a/common/darwin/Classes/FlutterRTCMediaRecorder.m b/common/darwin/Classes/FlutterRTCMediaRecorder.m new file mode 100644 index 0000000000..7661aae519 --- /dev/null +++ b/common/darwin/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1,168 @@ +#import 
+#import "FlutterRTCMediaRecorder.h" +#import "FlutterRTCAudioSink.h" +#import "FlutterRTCFrameCapturer.h" + +@import AVFoundation; + +@implementation FlutterRTCMediaRecorder { + int framesCount; + bool isInitialized; + CGSize _renderSize; + FlutterRTCAudioSink* _audioSink; + AVAssetWriterInput* _audioWriter; + int64_t _startTime; +} + +- (instancetype)initWithVideoTrack:(RTCVideoTrack *)video audioTrack:(RTCAudioTrack *)audio outputFile:(NSURL *)out { + self = [super init]; + isInitialized = false; + self.videoTrack = video; + self.output = out; + [video addRenderer:self]; + framesCount = 0; + if (audio != nil) + _audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio]; + else + NSLog(@"Audio track is nil"); + _startTime = -1; + return self; +} + +- (void)initialize:(CGSize)size { + _renderSize = size; + NSDictionary *videoSettings = @{ + AVVideoCompressionPropertiesKey: @{AVVideoAverageBitRateKey: @(6*1024*1024)}, + AVVideoCodecKey: AVVideoCodecTypeH264, + AVVideoHeightKey: @(size.height), + AVVideoWidthKey: @(size.width), + }; + self.writerInput = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeVideo + outputSettings:videoSettings]; + self.writerInput.expectsMediaDataInRealTime = true; + self.writerInput.mediaTimeScale = 30; + + if (_audioSink != nil) { + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + NSDictionary* audioSettings = @{ + AVFormatIDKey: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], + AVNumberOfChannelsKey: @1, + AVSampleRateKey: @44100.0, + AVChannelLayoutKey: [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], + AVEncoderBitRateKey: @64000, + }; + _audioWriter = [[AVAssetWriterInput alloc] + initWithMediaType:AVMediaTypeAudio + outputSettings:audioSettings + sourceFormatHint:_audioSink.format]; + _audioWriter.expectsMediaDataInRealTime = true; + } + + NSError *error; + self.assetWriter = [[AVAssetWriter alloc] + initWithURL:self.output + 
fileType:AVFileTypeMPEG4 + error:&error]; + if (error != nil) + NSLog(@"%@",[error localizedDescription]); + self.assetWriter.shouldOptimizeForNetworkUse = true; + [self.assetWriter addInput:self.writerInput]; + if (_audioWriter != nil) { + [self.assetWriter addInput:_audioWriter]; + _audioSink.bufferCallback = ^(CMSampleBufferRef buffer){ + if (self->_audioWriter.readyForMoreMediaData) { + if ([self->_audioWriter appendSampleBuffer:buffer]) + NSLog(@"Audio frame appended"); + else + NSLog(@"Audioframe not appended %@", self.assetWriter.error); + } + }; + } + [self.assetWriter startWriting]; + [self.assetWriter startSessionAtSourceTime:kCMTimeZero]; + + isInitialized = true; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame *)frame { + if (frame == nil) { + return; + } + if (!isInitialized) { + [self initialize:CGSizeMake((CGFloat) frame.width, (CGFloat) frame.height)]; + } + if (!self.writerInput.readyForMoreMediaData) { + NSLog(@"Drop frame, not ready"); + return; + } + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + BOOL shouldRelease = false; + if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer; + } else { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } + CMVideoFormatDescriptionRef formatDescription; + OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBufferRef, &formatDescription); + + CMSampleTimingInfo timingInfo; + + timingInfo.decodeTimeStamp = kCMTimeInvalid; + if (_startTime == -1) { + _startTime = frame.timeStampNs / 1000; + } + int64_t frameTime = (frame.timeStampNs / 1000) - _startTime; + timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000); + framesCount++; + + CMSampleBufferRef outBuffer; + + status = CMSampleBufferCreateReadyWithImageBuffer( + kCFAllocatorDefault, + pixelBufferRef, + formatDescription, + &timingInfo, + 
&outBuffer + ); + + if (![self.writerInput appendSampleBuffer:outBuffer]) { + NSLog(@"Frame not appended %@", self.assetWriter.error); + } + #if TARGET_OS_IPHONE + if (shouldRelease) { + CVPixelBufferRelease(pixelBufferRef); + } + #endif +} + +- (void)stop:(FlutterResult _Nonnull) result { + if (_audioSink != nil) { + _audioSink.bufferCallback = nil; + [_audioSink close]; + } + [self.videoTrack removeRenderer:self]; + [self.writerInput markAsFinished]; + [_audioWriter markAsFinished]; + dispatch_async(dispatch_get_main_queue(), ^{ + [self.assetWriter finishWritingWithCompletionHandler:^{ + NSError* error = self.assetWriter.error; + if (error == nil) { + result(nil); + } else { + result([FlutterError errorWithCode:@"Failed to save recording" + message:[error localizedDescription] + details:nil]); + } + }]; + }); +} + +@end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h index bd2ae35c5b..ee39d6345f 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.h +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -10,6 +10,7 @@ @class FlutterRTCVideoRenderer; @class FlutterRTCFrameCapturer; +@class FlutterRTCMediaRecorder; @class AudioManager; void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event); @@ -29,18 +30,25 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); > @property(nonatomic, strong) RTCPeerConnectionFactory* _Nullable peerConnectionFactory; -@property(nonatomic, strong) NSMutableDictionary* _Nullable peerConnections; -@property(nonatomic, strong) NSMutableDictionary* _Nullable localStreams; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable peerConnections; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable localStreams; @property(nonatomic, strong) NSMutableDictionary>* _Nullable localTracks; -@property(nonatomic, strong) NSMutableDictionary* _Nullable renders; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable renders; 
+@property(nonatomic, strong) NSMutableDictionary* recorders; @property(nonatomic, strong) NSMutableDictionary* _Nullable videoCapturerStopHandlers; -@property(nonatomic, strong) NSMutableDictionary* _Nullable frameCryptors; -@property(nonatomic, strong) NSMutableDictionary* _Nullable keyProviders; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable frameCryptors; +@property(nonatomic, strong) + NSMutableDictionary* _Nullable keyProviders; #if TARGET_OS_IPHONE -@property(nonatomic, retain) UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */ +@property(nonatomic, retain) + UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */ #endif @property(nonatomic, strong) FlutterEventSink _Nullable eventSink; @@ -49,8 +57,8 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); @property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer; @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput; -@property(nonatomic, strong) NSString * _Nonnull focusMode; -@property(nonatomic, strong) NSString * _Nonnull exposureMode; +@property(nonatomic, strong) NSString* _Nonnull focusMode; +@property(nonatomic, strong) NSString* _Nonnull exposureMode; @property(nonatomic) BOOL _usingFrontCamera; @property(nonatomic) NSInteger _lastTargetWidth; @@ -59,10 +67,15 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); @property(nonatomic, strong) AudioManager* _Nullable audioManager; -- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId peerConnectionId:(NSString* _Nullable)peerConnectionId; -- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId peerConnectionId:(NSString* _Nullable)peerConnectionId; -- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nullable)Id; -- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream ownerTag:(NSString* 
_Nullable)ownerTag; +- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId + peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId; +- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nullable)Id; +- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream + ownerTag:(NSString* _Nullable)ownerTag; - (NSDictionary* _Nullable)mediaTrackToMap:(RTCMediaStreamTrack* _Nonnull)track; - (NSDictionary* _Nullable)receiverToMap:(RTCRtpReceiver* _Nonnull)receiver; - (NSDictionary* _Nullable)transceiverToMap:(RTCRtpTransceiver* _Nonnull)transceiver; @@ -73,9 +86,11 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); - (void)ensureAudioSession; - (void)deactiveRtcAudioSession; -- (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nonnull)Id; -- (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nonnull)Id; +- (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; +- (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection + Id:(NSString* _Nonnull)Id; -+ (FlutterWebRTCPlugin * _Nullable)sharedSingleton; ++ (FlutterWebRTCPlugin* _Nullable)sharedSingleton; @end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m index 4c3cf8726f..6a4749d9db 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.m +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -6,6 +6,7 @@ #import "FlutterRTCMediaStream.h" #import "FlutterRTCPeerConnection.h" #import "FlutterRTCVideoRenderer.h" +#import "FlutterRTCMediaRecorder.h" #import 
"FlutterRTCFrameCryptor.h" #if TARGET_OS_IPHONE #import "FlutterRTCVideoPlatformViewFactory.h" @@ -184,6 +185,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel self.frameCryptors = [NSMutableDictionary new]; self.keyProviders = [NSMutableDictionary new]; self.videoCapturerStopHandlers = [NSMutableDictionary new]; + self.recorders = [NSMutableDictionary new]; #if TARGET_OS_IPHONE self.focusMode = @"locked"; self.exposureMode = @"locked"; @@ -1505,7 +1507,43 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { message:[NSString stringWithFormat:@"Error: peerConnection not found!"] details:nil]); } - } else { + } else if ([@"startRecordToFile" isEqualToString:call.method]){ + #if TARGET_OS_IOS + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"videoTrackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* audioTrackId = [self audioTrackIdForVideoTrackId:trackId]; + + RTCMediaStreamTrack *track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + RTCMediaStreamTrack *audioTrack = [self trackForId:audioTrackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + NSURL* pathUrl = [NSURL fileURLWithPath:path]; + self.recorders[recorderId] = [[FlutterRTCMediaRecorder alloc] + initWithVideoTrack:(RTCVideoTrack *)track + audioTrack:(RTCAudioTrack *)audioTrack + outputFile:pathUrl + ]; + } + result(nil); + #endif + + } else if ([@"stopRecordToFile" isEqualToString:call.method]) { + #if TARGET_OS_IOS + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + FlutterRTCMediaRecorder* recorder = self.recorders[recorderId]; + if (recorder != nil) { + [recorder stop:result]; + [self.recorders removeObjectForKey:recorderId]; + } else { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@ 
failed",call.method] + message:[NSString stringWithFormat:@"Error: recorder with id %@ not found!",recorderId] + details:nil]); + } + #endif + } else { [self handleFrameCryptorMethodCall:call result:result]; } } @@ -1629,6 +1667,38 @@ - (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId { return mediaStreamTrack; } +- (NSString *)audioTrackIdForVideoTrackId:(NSString *)videoTrackId { + NSString *audioTrackId = nil; + + // Iterate through all peerConnections + for (NSString *peerConnectionId in self.peerConnections) { + RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId]; + + // Iterate through the receivers to find the video track + for (RTCRtpReceiver *receiver in peerConnection.receivers) { + RTCMediaStreamTrack *track = [receiver valueForKey:@"track"]; + if ([track.kind isEqualToString:@"video"] && [track.trackId isEqualToString:videoTrackId]) { + // Found the video track, now look for the audio track in the same peerConnection + for (RTCRtpReceiver *audioReceiver in peerConnection.receivers) { + RTCMediaStreamTrack *audioTrack = [audioReceiver valueForKey:@"track"]; + if ([audioTrack.kind isEqualToString:@"audio"]) { + audioTrackId = audioTrack.trackId; + break; + } + } + break; + } + } + + // If the audioTrackId is found, break out of the loop + if (audioTrackId != nil) { + break; + } + } + + return audioTrackId; +} + - (RTCMediaStreamTrack*)trackForId:(NSString*)trackId peerConnectionId:(NSString*)peerConnectionId { id track = _localTracks[trackId]; RTCMediaStreamTrack *mediaStreamTrack = nil; diff --git a/common/darwin/Classes/RTCAudioSource+Private.h b/common/darwin/Classes/RTCAudioSource+Private.h new file mode 100644 index 0000000000..6e45d12fbf --- /dev/null +++ b/common/darwin/Classes/RTCAudioSource+Private.h @@ -0,0 +1,14 @@ +#ifdef __cplusplus +#import "WebRTC/RTCAudioSource.h" +#include "media_stream_interface.h" + +@interface RTCAudioSource () + +/** + * The AudioSourceInterface object passed to this 
RTCAudioSource during + * construction. + */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource; + +@end +#endif diff --git a/common/darwin/Classes/audio_sink_bridge.cpp b/common/darwin/Classes/audio_sink_bridge.cpp new file mode 100644 index 0000000000..16ce8fa841 --- /dev/null +++ b/common/darwin/Classes/audio_sink_bridge.cpp @@ -0,0 +1,27 @@ +#include "media_stream_interface.h" +#include "FlutterRTCAudioSink-Interface.h" + +class AudioSinkBridge : public webrtc::AudioTrackSinkInterface { +private: + void* sink; + +public: + AudioSinkBridge(void* sink1) { + sink = sink1; + } + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) override + { + RTCAudioSinkCallback(sink, + audio_data, + bits_per_sample, + sample_rate, + number_of_channels, + number_of_frames + ); + }; + int NumPreferredChannels() const override { return 1; } +}; diff --git a/common/darwin/Classes/media_stream_interface.h b/common/darwin/Classes/media_stream_interface.h new file mode 100644 index 0000000000..e25553f9fa --- /dev/null +++ b/common/darwin/Classes/media_stream_interface.h @@ -0,0 +1,199 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/// Source https://webrtc.googlesource.com/src/+/master/api/media_stream_interface.h + +#ifdef __cplusplus +#ifndef API_MEDIA_STREAM_INTERFACE_H_ +#define API_MEDIA_STREAM_INTERFACE_H_ + +#include +#include +#include +#include +#include + +namespace webrtc { + + // Generic observer interface. 
+ class ObserverInterface { + public: + virtual void OnChanged() = 0; + protected: + virtual ~ObserverInterface() {} + }; + class NotifierInterface { + public: + virtual void RegisterObserver(ObserverInterface* observer) = 0; + virtual void UnregisterObserver(ObserverInterface* observer) = 0; + virtual ~NotifierInterface() {} + }; + + enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained }; + // Interfaces where refcounting is part of the public api should + // inherit this abstract interface. The implementation of these + // methods is usually provided by the RefCountedObject template class, + // applied as a leaf in the inheritance tree. + class RefCountInterface { + public: + virtual void AddRef() const = 0; + virtual RefCountReleaseStatus Release() const = 0; + // Non-public destructor, because Release() has exclusive responsibility for + // destroying the object. + protected: + virtual ~RefCountInterface() {} + }; + + // Base class for sources. A MediaStreamTrack has an underlying source that + // provides media. A source can be shared by multiple tracks. + class MediaSourceInterface : public RefCountInterface, + public NotifierInterface { + public: + enum SourceState { kInitializing, kLive, kEnded, kMuted }; + virtual SourceState state() const = 0; + virtual bool remote() const = 0; + protected: + ~MediaSourceInterface() override = default; + }; + + // Interface for receiving audio data from a AudioTrack. + class AudioTrackSinkInterface { + public: + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) { + + }; + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* absolute_capture_timestamp_ms) { + // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one + // pure virtual. 
+ return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, + number_of_frames); + } + virtual int NumPreferredChannels() const { return -1; } + protected: + virtual ~AudioTrackSinkInterface() {} + }; + // AudioSourceInterface is a reference counted source used for AudioTracks. + // The same source can be used by multiple AudioTracks. + class AudioSourceInterface : public MediaSourceInterface { + public: + class AudioObserver { + public: + virtual void OnSetVolume(double volume) = 0; + protected: + virtual ~AudioObserver() {} + }; + // TODO(deadbeef): Makes all the interfaces pure virtual after they're + // implemented in chromium. + // Sets the volume of the source. |volume| is in the range of [0, 10]. + // TODO(tommi): This method should be on the track and ideally volume should + // be applied in the track in a way that does not affect clones of the track. + virtual void SetVolume(double volume) {} + // Registers/unregisters observers to the audio source. + virtual void RegisterAudioObserver(AudioObserver* observer) {} + virtual void UnregisterAudioObserver(AudioObserver* observer) {} + // TODO(tommi): Make pure virtual. + virtual void AddSink(AudioTrackSinkInterface* sink) {} + virtual void RemoveSink(AudioTrackSinkInterface* sink) {} + // Returns options for the AudioSource. + // (for some of the settings this approach is broken, e.g. setting + // audio network adaptation on the source is the wrong layer of abstraction). 
+// virtual const AudioOptions options() const; + }; +} +namespace rtc { + + template + class scoped_refptr { + public: + typedef T element_type; + scoped_refptr() : ptr_(nullptr) {} + scoped_refptr(std::nullptr_t) : ptr_(nullptr) {} // NOLINT(runtime/explicit) + explicit scoped_refptr(T* p) : ptr_(p) { + if (ptr_) + ptr_->AddRef(); + } + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) + ptr_->AddRef(); + } + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) + ptr_->AddRef(); + } + // Move constructors. + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + template + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + ~scoped_refptr() { + if (ptr_) + ptr_->Release(); + } + T* get() const { return ptr_; } + explicit operator bool() const { return ptr_ != nullptr; } + T& operator*() const { return *ptr_; } + T* operator->() const { return ptr_; } + // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a + // null pointer, all without touching the reference count of the underlying + // pointed-to object. The object is still reference counted, and the caller of + // release() is now the proud owner of one reference, so it is responsible for + // calling Release() once on the object when no longer using it. 
+ T* release() { + T* retVal = ptr_; + ptr_ = nullptr; + return retVal; + } + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) + p->AddRef(); + if (ptr_) + ptr_->Release(); + ptr_ = p; + return *this; + } + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + template + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + void swap(T** pp) noexcept { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + void swap(scoped_refptr& r) noexcept { swap(&r.ptr_); } + protected: + T* ptr_; + }; +}; + +#endif // API_MEDIA_STREAM_INTERFACE_H_ +#endif // __cplusplus diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index 33e89779be..e02e65dd37 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -5,6 +5,7 @@ import 'dart:math'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:gallery_saver_plus/gallery_saver.dart'; import 'package:path_provider/path_provider.dart'; /* @@ -24,6 +25,7 @@ class _GetUserMediaSampleState extends State { bool _isTorchOn = false; bool _isFrontCamera = true; MediaRecorder? _mediaRecorder; + String? 
_mediaRecorderFilePath; bool get _isRec => _mediaRecorder != null; @@ -101,33 +103,60 @@ class _GetUserMediaSampleState extends State { void _startRecording() async { if (_localStream == null) throw Exception('Stream is not initialized'); - if (Platform.isIOS) { - print('Recording is not available on iOS'); - return; + // TODO(rostopira): request write storage permission + final timestamp = DateTime.now().millisecondsSinceEpoch; + + if (!(Platform.isAndroid || Platform.isIOS || Platform.isMacOS)) { + throw 'Unsupported platform'; } - // TODO(rostopira): request write storage permission - final storagePath = await getExternalStorageDirectory(); - if (storagePath == null) throw Exception('Can\'t find storagePath'); + final tempDir = await getTemporaryDirectory(); + if (!(await tempDir.exists())) { + await tempDir.create(recursive: true); + } + + _mediaRecorderFilePath = '${tempDir.path}/$timestamp.mp4'; + + if (_mediaRecorderFilePath == null) { + throw Exception('Can\'t find storagePath'); + } - final filePath = storagePath.path + '/webrtc_sample/test.mp4'; + final file = File(_mediaRecorderFilePath!); + if (await file.exists()) { + await file.delete(); + } _mediaRecorder = MediaRecorder(); setState(() {}); final videoTrack = _localStream! 
.getVideoTracks() .firstWhere((track) => track.kind == 'video'); + await _mediaRecorder!.start( - filePath, + _mediaRecorderFilePath!, videoTrack: videoTrack, + audioChannel: RecorderAudioChannel.OUTPUT, ); } void _stopRecording() async { - await _mediaRecorder?.stop(); + if (_mediaRecorderFilePath == null) { + return; + } + + // album name works only for android, for ios use gallerySaver + await _mediaRecorder?.stop(albumName: 'FlutterWebRTC'); setState(() { _mediaRecorder = null; }); + + // this is only for ios, android already saves to albumName + await GallerySaver.saveVideo( + _mediaRecorderFilePath!, + albumName: 'FlutterWebRTC', + ); + + _mediaRecorderFilePath = null; } void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) { diff --git a/example/pubspec.yaml b/example/pubspec.yaml index d9f28489eb..9c5d1492b5 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -3,8 +3,8 @@ description: Demonstrates how to use the webrtc plugin. version: 1.0.0 publish_to: none environment: - sdk: '>=3.3.0 <4.0.0' - + sdk: ">=3.3.0 <4.0.0" + dependencies: # The following adds the Cupertino Icons font to your application. # Use with the CupertinoIcons class for iOS style icons. @@ -15,6 +15,7 @@ dependencies: flutter_webrtc: path: ../ # Required for MediaRecorder example + gallery_saver_plus: 3.2.4 path_provider: ^2.0.2 permission_handler: ^11.3.1 sdp_transform: ^0.3.2 @@ -30,7 +31,6 @@ dev_dependencies: # The following section is specific to Flutter. flutter: - # The following line ensures that the Material Icons font is # included with your application, so that you can use the icons in # the material Icons class. 
@@ -64,5 +64,5 @@ flutter: # - asset: fonts/TrajanPro_Bold.ttf # weight: 700 # - # For details regarding fonts from package dependencies, + # For details regarding fonts from package dependencies, # see https://flutter.io/custom-fonts/#from-packages diff --git a/ios/Classes/FlutterRTCAudioSink-Interface.h b/ios/Classes/FlutterRTCAudioSink-Interface.h new file mode 120000 index 0000000000..940c06d646 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink-Interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink-Interface.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink.h b/ios/Classes/FlutterRTCAudioSink.h new file mode 120000 index 0000000000..5242de9e22 --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCAudioSink.mm b/ios/Classes/FlutterRTCAudioSink.mm new file mode 120000 index 0000000000..c15372c4ed --- /dev/null +++ b/ios/Classes/FlutterRTCAudioSink.mm @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCAudioSink.mm \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.h b/ios/Classes/FlutterRTCMediaRecorder.h new file mode 120000 index 0000000000..31ca7e3b5f --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.h @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.h \ No newline at end of file diff --git a/ios/Classes/FlutterRTCMediaRecorder.m b/ios/Classes/FlutterRTCMediaRecorder.m new file mode 120000 index 0000000000..1c2b1bf1a8 --- /dev/null +++ b/ios/Classes/FlutterRTCMediaRecorder.m @@ -0,0 +1 @@ +../../common/darwin/Classes/FlutterRTCMediaRecorder.m \ No newline at end of file diff --git a/ios/Classes/RTCAudioSource+Private.h b/ios/Classes/RTCAudioSource+Private.h new file mode 120000 index 0000000000..7ce3b77fd6 --- /dev/null +++ b/ios/Classes/RTCAudioSource+Private.h @@ -0,0 +1 @@ +../../common/darwin/Classes/RTCAudioSource+Private.h \ No 
newline at end of file diff --git a/ios/Classes/audio_sink_bridge.cpp b/ios/Classes/audio_sink_bridge.cpp new file mode 120000 index 0000000000..13215e8454 --- /dev/null +++ b/ios/Classes/audio_sink_bridge.cpp @@ -0,0 +1 @@ +../../common/darwin/Classes/audio_sink_bridge.cpp \ No newline at end of file diff --git a/ios/Classes/media_stream_interface.h b/ios/Classes/media_stream_interface.h new file mode 120000 index 0000000000..5810a86316 --- /dev/null +++ b/ios/Classes/media_stream_interface.h @@ -0,0 +1 @@ +../../common/darwin/Classes/media_stream_interface.h \ No newline at end of file diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec index 32845d4e0a..379bae55dd 100644 --- a/ios/flutter_webrtc.podspec +++ b/ios/flutter_webrtc.podspec @@ -18,4 +18,9 @@ A new flutter plugin project. s.dependency 'WebRTC-SDK', '125.6422.06' s.ios.deployment_target = '13.0' s.static_framework = true + s.pod_target_xcconfig = { + 'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14', + 'USER_HEADER_SEARCH_PATHS' => 'Classes/**/*.h' + } + s.libraries = 'c++' end diff --git a/lib/src/media_recorder.dart b/lib/src/media_recorder.dart index 2f45d63fd4..2295dd353e 100644 --- a/lib/src/media_recorder.dart +++ b/lib/src/media_recorder.dart @@ -7,12 +7,22 @@ class MediaRecorder extends rtc.MediaRecorder { final rtc.MediaRecorder _delegate; @override - Future start(String path, - {MediaStreamTrack? videoTrack, RecorderAudioChannel? audioChannel}) => - _delegate.start(path, videoTrack: videoTrack, audioChannel: audioChannel); + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? audioChannel, + int rotationDegrees = 0, + }) { + return _delegate.start( + path, + videoTrack: videoTrack, + audioChannel: audioChannel, + ); + } @override - Future stop() => _delegate.stop(); + Future stop({String? albumName}) => + _delegate.stop(albumName: albumName ?? 
"FlutterWebRtc"); @override void startWeb( diff --git a/lib/src/native/media_recorder_impl.dart b/lib/src/native/media_recorder_impl.dart index 90b8aa0632..795e4c2127 100644 --- a/lib/src/native/media_recorder_impl.dart +++ b/lib/src/native/media_recorder_impl.dart @@ -9,12 +9,14 @@ import 'utils.dart'; class MediaRecorderNative extends MediaRecorder { static final _random = Random(); final _recorderId = _random.nextInt(0x7FFFFFFF); + var _isStarted = false; @override - Future start(String path, - {MediaStreamTrack? videoTrack, RecorderAudioChannel? audioChannel - // TODO(cloudwebrtc): add codec/quality options - }) async { + Future start( + String path, { + MediaStreamTrack? videoTrack, + RecorderAudioChannel? audioChannel, + }) async { if (audioChannel == null && videoTrack == null) { throw Exception('Neither audio nor video track were provided'); } @@ -28,6 +30,7 @@ class MediaRecorderNative extends MediaRecorder { ? videoTrack.peerConnectionId : null }); + _isStarted = true; } @override @@ -39,6 +42,13 @@ class MediaRecorderNative extends MediaRecorder { } @override - Future stop() async => await WebRTC.invokeMethod( - 'stopRecordToFile', {'recorderId': _recorderId}); + Future stop({String? 
albumName}) async { + if (!_isStarted) { + throw "Media recorder not started!"; + } + return await WebRTC.invokeMethod('stopRecordToFile', { + 'recorderId': _recorderId, + 'albumName': albumName, + }); + } } diff --git a/linux/CMakeLists.txt b/linux/CMakeLists.txt index 9d06ed2d7b..5e968390f2 100644 --- a/linux/CMakeLists.txt +++ b/linux/CMakeLists.txt @@ -24,6 +24,7 @@ add_library(${PLUGIN_NAME} SHARED "flutter/core_implementations.cc" "flutter/standard_codec.cc" "flutter/plugin_registrar.cc" + "task_runner_linux.cc" ) include_directories( diff --git a/linux/flutter_webrtc_plugin.cc b/linux/flutter_webrtc_plugin.cc index 79ce73d0f3..336204d55a 100644 --- a/linux/flutter_webrtc_plugin.cc +++ b/linux/flutter_webrtc_plugin.cc @@ -2,6 +2,7 @@ #include "flutter_common.h" #include "flutter_webrtc.h" +#include "task_runner_linux.h" const char* kChannelName = "FlutterWebRTC.Method"; @@ -37,7 +38,7 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { TextureRegistrar* textures() { return textures_; } - TaskRunner* task_runner() { return nullptr; } + TaskRunner* task_runner() { return task_runner_.get(); } private: // Creates a plugin that communicates on the given channel. 
@@ -45,7 +46,8 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { std::unique_ptr channel) : channel_(std::move(channel)), messenger_(registrar->messenger()), - textures_(registrar->texture_registrar()) { + textures_(registrar->texture_registrar()), + task_runner_(std::make_unique()) { webrtc_ = std::make_unique(this); } @@ -63,6 +65,7 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { std::unique_ptr webrtc_; BinaryMessenger* messenger_; TextureRegistrar* textures_; + std::unique_ptr task_runner_; }; } // namespace flutter_webrtc_plugin diff --git a/linux/task_runner_linux.cc b/linux/task_runner_linux.cc new file mode 100644 index 0000000000..1c8a3f3240 --- /dev/null +++ b/linux/task_runner_linux.cc @@ -0,0 +1,31 @@ +#include "task_runner_linux.h" + +#include + +namespace flutter_webrtc_plugin { + +void TaskRunnerLinux::EnqueueTask(TaskClosure task) { + { + std::lock_guard lock(tasks_mutex_); + tasks_.push(std::move(task)); + } + + GMainContext* context = g_main_context_default(); + if (context) { + g_main_context_invoke( + context, + [](gpointer user_data) -> gboolean { + TaskRunnerLinux* runner = static_cast(user_data); + std::lock_guard lock(runner->tasks_mutex_); + while (!runner->tasks_.empty()) { + TaskClosure task = std::move(runner->tasks_.front()); + runner->tasks_.pop(); + task(); + } + return G_SOURCE_REMOVE; + }, + this); + } +} + +} // namespace flutter_webrtc_plugin diff --git a/linux/task_runner_linux.h b/linux/task_runner_linux.h new file mode 100644 index 0000000000..cff94639f7 --- /dev/null +++ b/linux/task_runner_linux.h @@ -0,0 +1,26 @@ +#ifndef PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ +#define PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ + +#include +#include +#include +#include "task_runner.h" + +namespace flutter_webrtc_plugin { + +class TaskRunnerLinux : public TaskRunner { + public: + TaskRunnerLinux() = default; + ~TaskRunnerLinux() override = default; + + // TaskRunner implementation. 
+ void EnqueueTask(TaskClosure task) override; + + private: + std::mutex tasks_mutex_; + std::queue tasks_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // PACKAGES_FLUTTER_WEBRTC_LINUX_TASK_RUNNER_LINUX_H_ \ No newline at end of file diff --git a/pubspec.yaml b/pubspec.yaml index bfac287e6d..62677b5477 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,19 +1,19 @@ name: flutter_webrtc description: Flutter WebRTC plugin for iOS/Android/Destkop/Web, based on GoogleWebRTC. -version: 0.13.1 +version: 0.13.2 homepage: https://github.com/cloudwebrtc/flutter-webrtc environment: - sdk: '>=3.3.0 <4.0.0' - flutter: '>=1.22.0' + sdk: ">=3.3.0 <4.0.0" + flutter: ">=1.22.0" dependencies: collection: ^1.17.0 - dart_webrtc: ^1.5.3 + dart_webrtc: ^1.5.4 flutter: sdk: flutter path_provider: ^2.0.2 web: ^1.0.0 - webrtc_interface: ^1.2.2+hotfix.1 + webrtc_interface: ^1.2.3 dev_dependencies: flutter_test: diff --git a/windows/flutter_webrtc_plugin.cc b/windows/flutter_webrtc_plugin.cc index bfe08328dd..836989c00e 100644 --- a/windows/flutter_webrtc_plugin.cc +++ b/windows/flutter_webrtc_plugin.cc @@ -4,6 +4,8 @@ #include "flutter_webrtc.h" #include "task_runner_windows.h" +#include + const char* kChannelName = "FlutterWebRTC.Method"; namespace flutter_webrtc_plugin { @@ -71,7 +73,7 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { void FlutterWebRTCPluginRegisterWithRegistrar( FlutterDesktopPluginRegistrarRef registrar) { - static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); - flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( - plugin_registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + flutter::PluginRegistrarManager::GetInstance() + ->GetRegistrar(registrar)); } \ No newline at end of file