8000 Add captureFrame support for iOS · linuxerwang/flutter-webrtc@d48b836 · GitHub
[go: up one dir, main page]

Skip to content
Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit d48b836

Browse files
committed
Add captureFrame support for iOS
1 parent 8bfbd00 commit d48b836
Copy full SHA for d48b836

File tree

6 files changed

+150
-12
lines changed

6 files changed

+150
-12
lines changed

example/lib/src/get_user_media_sample.dart

Lines changed: 29 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
4747
"audio": false,
4848
"video": {
4949
"mandatory": {
50-
"minWidth":'1280', // Provide your own width, height and frame rate here
50+
"minWidth": '1280', // Provide your own width, height and frame rate here
5151
"minHeight": '720',
5252
"minFrameRate": '30',
5353
},
@@ -92,7 +92,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
9292
final storagePath = await getExternalStorageDirectory();
9393
final filePath = storagePath.path + '/webrtc_sample/test.mp4';
9494
_mediaRecorder = MediaRecorder();
95-
setState((){});
95+
setState(() {});
9696
await _localStream.getMediaTracks();
9797
final videoTrack = _localStream.getVideoTracks().firstWhere((track) => track.kind == "video");
9898
await _mediaRecorder.start(
@@ -103,22 +103,42 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
103103

104104
_stopRecording() async {
105105
await _mediaRecorder?.stop();
106-
setState((){
106+
setState(() {
107107
_mediaRecorder = null;
108108
});
109109
}
110110

111+
_captureFrame() async {
112+
String filePath;
113+
if (Platform.isAndroid) {
114+
final storagePath = await getExternalStorageDirectory();
115+
filePath = storagePath.path + '/webrtc_sample/test.jpg';
116+
} else {
117+
final storagePath = await getApplicationDocumentsDirectory();
118+
filePath = storagePath.path + '/test${DateTime.now()}.jpg';
119+
}
120+
121+
final videoTrack = _localStream.getVideoTracks().firstWhere((track) => track.kind == "video");
122+
videoTrack.captureFrame(filePath);
123+
}
124+
111125
@override
112126
Widget build(BuildContext context) {
113127
return new Scaffold(
114128
appBar: new AppBar(
115129
title: new Text('GetUserMedia API Test'),
116-
actions: _inCalling ? <Widget>[
117-
new IconButton(
118-
icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record),
119-
onPressed: _isRec ? _stopRecording : _startRecording,
120-
),
121-
] : null,
130+
actions: _inCalling
131+
? <Widget>[
132+
new IconButton(
133+
icon: Icon(Icons.camera),
134+
onPressed: _captureFrame,
135+
),
136+
new IconButton(
137+
icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record),
138+
onPressed: _isRec ? _stopRecording : _startRecording,
139+
),
140+
]
141+
: null,
122142
),
123143
body: new OrientationBuilder(
124144
builder: (context, orientation) {

ios/Classes/FlutterRTCFrameCapturer.h

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
#import <Flutter/Flutter.h>
#import <WebRTC/WebRTC.h>

NS_ASSUME_NONNULL_BEGIN

/// One-shot frame grabber: registers itself as a renderer on a video track,
/// converts the first frame it receives to JPEG, writes it to disk, and
/// completes the supplied FlutterResult.
@interface FlutterRTCFrameCapturer : NSObject <RTCVideoRenderer>

/// Starts a capture. The capturer adds itself as a renderer on `track`;
/// the first rendered frame is saved to `path` and `result` is invoked
/// (nil on success, FlutterError on failure).
- (instancetype)initWithTrack:(RTCVideoTrack *)track
                       toPath:(NSString *)path
                       result:(FlutterResult)result;

@end

NS_ASSUME_NONNULL_END

ios/Classes/FlutterRTCFrameCapturer.m

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
#import <Flutter/Flutter.h>

#import "FlutterRTCFrameCapturer.h"

#include "libyuv.h"

@import CoreImage;
@import CoreVideo;

@implementation FlutterRTCFrameCapturer {
    RTCVideoTrack* _track;
    NSString* _path;
    FlutterResult _result;
    bool _gotFrame;
}

/// Designated initializer. Registers self as a renderer on the track; the
/// track retains its renderers, which keeps this object alive until the
/// frame has been captured and the renderer removed.
- (instancetype)initWithTrack:(RTCVideoTrack *)track toPath:(NSString *)path result:(FlutterResult)result
{
    self = [super init];
    if (self) {
        _gotFrame = false;
        _track = track;
        _path = path;
        _result = result;
        [track addRenderer:self];
    }
    return self;
}

// RTCVideoRenderer: nothing to do — dimensions are read from each frame.
- (void)setSize:(CGSize)size
{
}

// RTCVideoRenderer callback, invoked on WebRTC's rendering thread.
// Converts the first delivered frame to an upright JPEG, writes it to
// _path, and completes the FlutterResult on the main thread (platform
// channel results must not be delivered from a background thread).
- (void)renderFrame:(nullable RTCVideoFrame *)frame
{
    // Only the first non-nil frame is wanted; later callbacks (which may
    // still race in before removeRenderer takes effect) are ignored.
    if (_gotFrame || frame == nil) return;
    _gotFrame = true;

    // Detach from the track on the main queue — one frame is enough.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self->_track removeRenderer:self];
    });

    id<RTCVideoFrameBuffer> buffer = [frame buffer];
    id<RTCI420Buffer> i420Buffer = [buffer toI420];

    CVPixelBufferRef pixelBuffer = nil;
    CVReturn cvRet = CVPixelBufferCreate(kCFAllocatorDefault,
                                         i420Buffer.width,
                                         i420Buffer.height,
                                         kCVPixelFormatType_32ARGB,
                                         nil,
                                         &pixelBuffer);
    if (cvRet != kCVReturnSuccess || pixelBuffer == nil) {
        [self finishWithError:@"CreatePixelBufferFailed"
                      message:@"Could not allocate a pixel buffer for the captured frame."];
        return;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    uint8_t* dst = CVPixelBufferGetBaseAddress(pixelBuffer);
    const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    // NOTE: libyuv names formats by little-endian word order, so libyuv
    // "BGRA" is A,R,G,B in memory — matching kCVPixelFormatType_32ARGB.
    I420ToBGRA(i420Buffer.dataY,
               i420Buffer.strideY,
               i420Buffer.dataU,
               i420Buffer.strideU,
               i420Buffer.dataV,
               i420Buffer.strideV,
               dst,
               (int)bytesPerRow,
               i420Buffer.width,
               i420Buffer.height);

    // CIImage retains the pixel buffer; the base-address lock is only
    // needed while libyuv writes into it directly.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    CIContext *context = [[CIContext alloc] init];
    CIImage *coreImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer];

    // Rotate to upright according to the frame's rotation metadata.
    CIImage *rotatedImage;
    switch (frame.rotation) {
        case RTCVideoRotation_90:
            rotatedImage = [coreImage imageByApplyingOrientation:kCGImagePropertyOrientationRight];
            break;
        case RTCVideoRotation_180:
            rotatedImage = [coreImage imageByApplyingOrientation:kCGImagePropertyOrientationDown];
            break;
        case RTCVideoRotation_270:
            rotatedImage = [coreImage imageByApplyingOrientation:kCGImagePropertyOrientationLeft];
            break;
        case RTCVideoRotation_0:
        default:
            rotatedImage = coreImage;
            break;
    }

    NSData* data = [context JPEGRepresentationOfImage:rotatedImage
                                           colorSpace:rotatedImage.colorSpace
                                              options:@{}];

    // CF objects are not managed by ARC — balance the CVPixelBufferCreate
    // above (the original leaked one buffer per capture).
    CVPixelBufferRelease(pixelBuffer);

    if ([data writeToFile:_path atomically:NO]) {
        [self finishWithResult:nil];
    } else {
        [self finishWithError:@"WriteFileFailed"
                      message:[NSString stringWithFormat:@"Could not write captured frame to %@", _path]];
    }
}

// Completes the pending FlutterResult exactly once, on the main thread.
- (void)finishWithResult:(id)value
{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self->_result) {
            self->_result(value);
            self->_result = nil;
        }
    });
}

// Convenience wrapper that completes the call with a FlutterError.
- (void)finishWithError:(NSString *)code message:(NSString *)message
{
    [self finishWithResult:[FlutterError errorWithCode:code message:message details:nil]];
}

@end

ios/Classes/FlutterRTCMediaStream.h

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313

1414
-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track
1515
result:(FlutterResult) result;
16-
@end
17-
1816

17+
-(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track
18+
toPath:(NSString *) path
19+
result:(FlutterResult) result;
20+
@end

ios/Classes/FlutterRTCMediaStream.m

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
#import <WebRTC/WebRTC.h>
44

5+
#import "FlutterRTCFrameCapturer.h"
56
#import "FlutterRTCMediaStream.h"
67
#import "FlutterRTCPeerConnection.h"
78
#import "FlutterRPScreenRecorder.h"
@@ -492,6 +493,16 @@ -(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(Flutter
492493
}];
493494
}
494495

// Captures a single frame from `track` and writes it as a JPEG to `path`.
// Completes `result` when done: nil on success, FlutterError on failure.
-(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *)path result:(FlutterResult)result
{
    if (!self.videoCapturer) {
        NSLog(@"Video capturer is null. Can't capture frame.");
        // Fail the call explicitly — silently returning (as before) would
        // leave the Dart-side Future pending forever.
        result([FlutterError errorWithCode:@"CaptureFrameFailed"
                                   message:@"Video capturer is null. Can't capture frame."
                                   details:nil]);
        return;
    }

    // The capturer adds itself as a renderer on the track, which keeps it
    // alive until the frame is captured; no local reference is needed
    // (the (void) cast silences the unused-variable warning).
    (void)[[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result];
}
505+
495506
-(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track
496507
{
497508
if (track) {

ios/Classes/FlutterWebRTCPlugin.m

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,22 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
166166
message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"]
167167
details:nil]);
168168
}
169+
} else if ([@"captureFrame" isEqualToString:call.method]) {
170+
NSDictionary* argsMap = call.arguments;
171+
NSString* path = argsMap[@"path"];
172+
NSString* trackId = argsMap[@"trackId"];
173+
174+
RTCMediaStreamTrack *track = self.localTracks[trackId];
175+
if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) {
176+
RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
177+
[self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result];
178+
} else {
179+
if (track == nil) {
180+
result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]);
181+
} else {
182+
result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]);
183+
}
184+
}
169185
} else if ([@"setLocalDescription" isEqualToString:call.method]) {
170186
NSDictionary* argsMap = call.arguments;
171187
NSString* peerConnectionId = argsMap[@"peerConnectionId"];
@@ -574,4 +590,3 @@ - (CGRect)parseRect:(NSDictionary *)rect {
574590
}
575591

576592
@end
577-

0 commit comments

Comments
 (0)
0