Merge pull request #75 from LinusU/ios-capture-frame · linuxerwang/flutter-webrtc@d1282a1 · GitHub

Commit d1282a1

Merge pull request flutter-webrtc#75 from LinusU/ios-capture-frame
Add captureFrame support for iOS
2 parents 8bfbd00 + f21eb5f · commit d1282a1
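
For orientation, the Dart-facing surface of this change is a single new call on a local video track. The full example is in the diff below; this is only a minimal sketch, assuming an already-running local MediaStream named _localStream:

    // Minimal sketch of the new API (full version in example/lib/src/get_user_media_sample.dart below).
    // Assumes _localStream is an active local MediaStream that contains a video track.
    final videoTrack = _localStream
        .getVideoTracks()
        .firstWhere((track) => track.kind == "video");
    videoTrack.captureFrame('/path/to/frame.jpg'); // saves the next rendered frame as a JPEG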

File tree: 6 files changed (+150 −12 lines)


example/lib/src/get_user_media_sample.dart (29 additions, 9 deletions)

@@ -47,7 +47,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
       "audio": false,
       "video": {
         "mandatory": {
-          "minWidth":'1280', // Provide your own width, height and frame rate here
+          "minWidth": '1280', // Provide your own width, height and frame rate here
           "minHeight": '720',
           "minFrameRate": '30',
         },
@@ -92,7 +92,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
     final storagePath = await getExternalStorageDirectory();
     final filePath = storagePath.path + '/webrtc_sample/test.mp4';
     _mediaRecorder = MediaRecorder();
-    setState((){});
+    setState(() {});
     await _localStream.getMediaTracks();
     final videoTrack = _localStream.getVideoTracks().firstWhere((track) => track.kind == "video");
     await _mediaRecorder.start(
@@ -103,22 +103,42 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
 
   _stopRecording() async {
     await _mediaRecorder?.stop();
-    setState((){
+    setState(() {
       _mediaRecorder = null;
     });
   }
 
+  _captureFrame() async {
+    String filePath;
+    if (Platform.isAndroid) {
+      final storagePath = await getExternalStorageDirectory();
+      filePath = storagePath.path + '/webrtc_sample/test.jpg';
+    } else {
+      final storagePath = await getApplicationDocumentsDirectory();
+      filePath = storagePath.path + '/test${DateTime.now()}.jpg';
+    }
+
+    final videoTrack = _localStream.getVideoTracks().firstWhere((track) => track.kind == "video");
+    videoTrack.captureFrame(filePath);
+  }
+
   @override
   Widget build(BuildContext context) {
     return new Scaffold(
       appBar: new AppBar(
         title: new Text('GetUserMedia API Test'),
-        actions: _inCalling ? <Widget>[
-          new IconButton(
-            icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record),
-            onPressed: _isRec ? _stopRecording : _startRecording,
-          ),
-        ] : null,
+        actions: _inCalling
+            ? <Widget>[
+                new IconButton(
+                  icon: Icon(Icons.camera),
+                  onPressed: _captureFrame,
+                ),
+                new IconButton(
+                  icon: Icon(_isRec ? Icons.stop : Icons.fiber_manual_record),
+                  onPressed: _isRec ? _stopRecording : _startRecording,
+                ),
+              ]
+            : null,
       ),
       body: new OrientationBuilder(
         builder: (context, orientation) {
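
A note on the _captureFrame helper above: the hunk does not show the file's import block, but the helper presumably relies on dart:io for the Platform check and on the path_provider package for the directory lookups, roughly:

    import 'dart:io' show Platform;                      // Platform.isAndroid
    import 'package:path_provider/path_provider.dart';   // getExternalStorageDirectory(), getApplicationDocumentsDirectory()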

ios/Classes/FlutterRTCFrameCapturer.h (new file, 8 additions)

+#import <Flutter/Flutter.h>
+#import <WebRTC/WebRTC.h>
+
+@interface FlutterRTCFrameCapturer : NSObject<RTCVideoRenderer>
+
+- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result;
+
+@end

ios/Classes/FlutterRTCFrameCapturer.m (new file, 82 additions)

+#import <Flutter/Flutter.h>
+
+#import "FlutterRTCFrameCapturer.h"
+
+#include "libyuv.h"
+
+@import CoreImage;
+@import CoreVideo;
+
+@implementation FlutterRTCFrameCapturer {
+    RTCVideoTrack* _track;
+    NSString* _path;
+    FlutterResult _result;
+    bool _gotFrame;
+}
+
+- (instancetype)initWithTrack:(RTCVideoTrack *) track toPath:(NSString *) path result:(FlutterResult)result
+{
+    self = [super init];
+    if (self) {
+        _gotFrame = false;
+        _track = track;
+        _path = path;
+        _result = result;
+        [track addRenderer:self];
+    }
+    return self;
+}
+
+- (void)setSize:(CGSize)size
+{
+}
+
+- (void)renderFrame:(nullable RTCVideoFrame *)frame
+{
+    if (_gotFrame || frame == nil) return;
+    _gotFrame = true;
+
+    id<RTCVideoFrameBuffer> buffer = frame.buffer;
+    CVPixelBufferRef pixelBufferRef = ((RTCCVPixelBuffer *) buffer).pixelBuffer;
+
+    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef];
+    CIContext *context = [CIContext contextWithOptions:nil];
+    CGImageRef cgImage = [context createCGImage:ciImage
+                                       fromRect:CGRectMake(0, 0, frame.width, frame.height)];
+
+    UIImageOrientation orientation;
+    switch (frame.rotation) {
+        case RTCVideoRotation_90:
+            orientation = UIImageOrientationRight;
+            break;
+        case RTCVideoRotation_180:
+            orientation = UIImageOrientationDown;
+            break;
+        case RTCVideoRotation_270:
+            orientation = UIImageOrientationLeft;
+            break;
+        default:
+            orientation = UIImageOrientationUp;
+            break;
+    }
+
+    UIImage *uiImage = [UIImage imageWithCGImage:cgImage scale:1 orientation:orientation];
+    CGImageRelease(cgImage);
+    NSData *jpgData = UIImageJPEGRepresentation(uiImage, 0.9f);
+
+    if ([jpgData writeToFile:_path atomically:NO]) {
+        NSLog(@"File written successfully to %@", _path);
+        _result(nil);
+    } else {
+        NSLog(@"Failed to write to file");
+        _result([FlutterError errorWithCode:@"CaptureFrameFailed"
+                                    message:@"Failed to write JPEG data to file"
+                                    details:nil]);
+    }
+
+    dispatch_async(dispatch_get_main_queue(), ^{
+        [self->_track removeRenderer:self];
+        self->_track = nil;
+    });
+}
+
+@end

ios/Classes/FlutterRTCMediaStream.h (4 additions, 2 deletions)

@@ -13,6 +13,8 @@
 
 -(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track
                              result:(FlutterResult) result;
-@end
-
 
+-(void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack *)track
+                             toPath:(NSString *) path
+                             result:(FlutterResult) result;
+@end

ios/Classes/FlutterRTCMediaStream.m (11 additions, 0 deletions)

@@ -2,6 +2,7 @@
 
 #import <WebRTC/WebRTC.h>
 
+#import "FlutterRTCFrameCapturer.h"
 #import "FlutterRTCMediaStream.h"
 #import "FlutterRTCPeerConnection.h"
 #import "FlutterRPScreenRecorder.h"
@@ -492,6 +493,16 @@ -(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track result:(Flutter
     }];
 }
 
+-(void)mediaStreamTrackCaptureFrame:(RTCVideoTrack *)track toPath:(NSString *) path result:(FlutterResult)result
+{
+    if (!self.videoCapturer) {
+        NSLog(@"Video capturer is null. Can't capture frame.");
+        return;
+    }
+
+    FlutterRTCFrameCapturer *capturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track toPath:path result:result];
+}
+
 -(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track
 {
     if (track) {

ios/Classes/FlutterWebRTCPlugin.m (16 additions, 1 deletion)

@@ -166,6 +166,22 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result
                                    message:[NSString stringWithFormat:@"Error: peerConnection or mediaStream not found!"]
                                    details:nil]);
         }
+    } else if ([@"captureFrame" isEqualToString:call.method]) {
+        NSDictionary* argsMap = call.arguments;
+        NSString* path = argsMap[@"path"];
+        NSString* trackId = argsMap[@"trackId"];
+
+        RTCMediaStreamTrack *track = self.localTracks[trackId];
+        if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) {
+            RTCVideoTrack *videoTrack = (RTCVideoTrack *)track;
+            [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result];
+        } else {
+            if (track == nil) {
+                result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]);
+            } else {
+                result([FlutterError errorWithCode:[@"Track is class of " stringByAppendingString:[[track class] description]] message:nil details:nil]);
+            }
+        }
     } else if ([@"setLocalDescription" isEqualToString:call.method]) {
         NSDictionary* argsMap = call.arguments;
         NSString* peerConnectionId = argsMap[@"peerConnectionId"];
@@ -574,4 +590,3 @@ - (CGRect)parseRect:(NSDictionary *)rect {
 }
 
 @end
-
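
The handler above reads a path and a trackId out of the method-call arguments. The Dart half of that bridge is not part of this diff; a plausible plugin-side sketch (the _channel field, its channel name, and the id member are assumptions, only the method name and argument keys come from the handler) would be:

    // Hypothetical Dart-side invocation matching the "captureFrame" handler above.
    // _channel and id are assumed plugin-internal members; they are not shown in this commit.
    Future<void> captureFrame(String path) async {
      await _channel.invokeMethod('captureFrame', <String, dynamic>{
        'trackId': id,
        'path': path,
      });
    }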

Comments (0)