@@ -241,48 +241,49 @@ - (void)getUserVideo:(NSDictionary *)constraints
241
241
} else {
242
242
// If the specified facingMode value is not supported, fall back to
243
243
// the default video device.
244
+ self._usingFrontCamera = NO ;
244
245
position = AVCaptureDevicePositionUnspecified;
245
246
}
246
- if (AVCaptureDevicePositionUnspecified != position) {
247
- for (AVCaptureDevice *aVideoDevice in [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo]) {
248
- if (aVideoDevice.position == position) {
249
- videoDevice = aVideoDevice;
250
- break ;
251
- }
252
- }
253
- }
247
+ videoDevice = [self findDeviceForPosition: position];
254
248
}
255
249
}
256
250
if (!videoDevice) {
257
251
videoDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
258
252
}
259
253
}
260
254
255
+ // TODO(rostopira): refactor to separate function and add support for max
256
+ self._targetWidth = 1280 ;
257
+ self._targetHeight = 720 ;
258
+ self._targetFps = 30 ;
259
+ id widthConstraint = videoConstraints[kRTCMediaConstraintsMinWidth ];
260
+ if ([widthConstraint isKindOfClass: [NSString class ]]) {
261
+ int possibleWidth = [widthConstraint intValue ];
262
+ if (possibleWidth != 0 ) {
263
+ self._targetWidth = possibleWidth;
264
+ }
265
+ }
266
+ id heightConstraint = videoConstraints[kRTCMediaConstraintsMinHeight ];
267
+ if ([heightConstraint isKindOfClass: [NSString class ]]) {
268
+ int possibleHeight = [heightConstraint intValue ];
269
+ if (possibleHeight != 0 ) {
270
+ self._targetHeight = possibleHeight;
271
+ }
272
+ }
273
+ id fpsConstraint = videoConstraints[kRTCMediaConstraintsMinFrameRate ];
274
+ if ([fpsConstraint isKindOfClass: [NSString class ]]) {
275
+ int possibleFps = [fpsConstraint intValue ];
276
+ if (possibleFps != 0 ) {
277
+ self._targetFps = possibleFps;
278
+ }
279
+ }
280
+
261
281
if (videoDevice) {
262
282
RTCVideoSource *videoSource = [self .peerConnectionFactory videoSource ];
263
- // FIXME: Video capturer shouldn't be local to be able to stop
264
283
self.videoCapturer = [[RTCCameraVideoCapturer alloc ] initWithDelegate: videoSource];
265
- AVCaptureDeviceFormat *selectedFormat = nil ;
266
- int currentDiff = INT_MAX;
267
- // TODO: use values from constraints map
268
- int targetWidth = 1280 ;
269
- int targetHeight = 720 ;
270
- for (AVCaptureDeviceFormat *format in [RTCCameraVideoCapturer supportedFormatsForDevice: videoDevice]) {
271
- CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions (format.formatDescription );
272
- FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType (format.formatDescription );
273
- int diff = abs (targetWidth - dimension.width ) + abs (targetHeight - dimension.height );
274
- if (diff < currentDiff) {
275
- selectedFormat = format;
276
- currentDiff = diff;
277
- } else if (diff == currentDiff && pixelFormat == [self .videoCapturer preferredOutputPixelFormat ]) {
278
- selectedFormat = format;
279
- }
280
- }
281
- if (selectedFormat == nil ) {
282
- NSLog (@" Capture format is nil. Fallback" );
283
- selectedFormat = [RTCCameraVideoCapturer supportedFormatsForDevice: videoDevice].firstObject ;
284
- }
285
- [self .videoCapturer startCaptureWithDevice: videoDevice format: selectedFormat fps: 30 completionHandler: ^(NSError *error) {
284
+ AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice: videoDevice];
285
+ NSInteger selectedFps = [self selectFpsForFormat: selectedFormat];
286
+ [self .videoCapturer startCaptureWithDevice: videoDevice format: selectedFormat fps: selectedFps completionHandler: ^(NSError *error) {
286
287
if (error) {
287
288
NSLog (@" Start capture error: %@ " , [error localizedDescription ]);
288
289
}
@@ -435,34 +436,63 @@ -(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled
435
436
436
437
// Switches the local capturer between the front and back camera.
// The _usingFrontCamera flag is committed only after a usable device and
// format are found for the target position, so a failed lookup (e.g. a
// device with no back camera) cannot desync the flag from the camera that
// is actually capturing, and startCapture is never invoked with nil args.
- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track
{
    if (!self.videoCapturer) {
        NSLog(@"Video capturer is null. Can't switch camera");
        return;
    }
    // Compute the target position first; flip the flag only on success.
    BOOL useFrontCamera = !self._usingFrontCamera;
    AVCaptureDevicePosition position = useFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
    AVCaptureDevice *videoDevice = [self findDeviceForPosition:position];
    AVCaptureDeviceFormat *selectedFormat = [self selectFormatForDevice:videoDevice];
    if (!videoDevice || !selectedFormat) {
        NSLog(@"No capture device or format for position %ld. Can't switch camera", (long)position);
        return;
    }
    self._usingFrontCamera = useFrontCamera;
    [self.videoCapturer startCaptureWithDevice:videoDevice format:selectedFormat fps:[self selectFpsForFormat:selectedFormat]];
}
459
449
460
450
// Stops a local track: disables it and drops our strong reference from
// localTracks so it can be released. Guarding on nil keeps the intent
// explicit even though messaging nil would be a harmless no-op.
- (void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track
{
    if (!track) {
        return;
    }
    track.isEnabled = NO;
    [self.localTracks removeObjectForKey:track.trackId];
}
457
+
458
// Returns a capture device for the requested camera position.
//
// Unspecified position maps to the system default video device; otherwise
// the first device whose position matches is returned, falling back to the
// first available capture device when none matches. Returns nil — instead
// of raising NSRangeException via captureDevices[0] — when no capture
// devices exist at all (e.g. the iOS Simulator or a camera-less device).
- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
    if (position == AVCaptureDevicePositionUnspecified) {
        return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
    for (AVCaptureDevice *device in captureDevices) {
        if (device.position == position) {
            return device;
        }
    }
    // firstObject is nil-safe on an empty array; subscript 0 would throw.
    return captureDevices.firstObject;
}
470
+
471
// Picks, from the formats WebRTC supports on |device|, the one whose
// dimensions are closest to the configured _targetWidth/_targetHeight
// (distance = |width delta| + |height delta|). Ties are broken in favour
// of the capturer's preferred output pixel format. Returns nil when the
// device reports no supported formats.
- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
    AVCaptureDeviceFormat *bestFormat = nil;
    int bestDiff = INT_MAX;
    for (AVCaptureDeviceFormat *candidate in [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
        CMVideoDimensions size = CMVideoFormatDescriptionGetDimensions(candidate.formatDescription);
        int candidateDiff = abs(self._targetWidth - size.width) + abs(self._targetHeight - size.height);
        if (candidateDiff < bestDiff) {
            bestFormat = candidate;
            bestDiff = candidateDiff;
        } else if (candidateDiff == bestDiff &&
                   CMFormatDescriptionGetMediaSubType(candidate.formatDescription) ==
                       [self.videoCapturer preferredOutputPixelFormat]) {
            bestFormat = candidate;
        }
    }
    return bestFormat;
}
489
+
490
// Caps the configured _targetFps at the highest frame rate |format|
// supports. When the format reports no frame-rate ranges (or is nil, in
// which case the property read yields nil and the loop never runs) the
// result is 0.
- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
    Float64 highestSupportedRate = 0;
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (range.maxFrameRate > highestSupportedRate) {
            highestSupportedRate = range.maxFrameRate;
        }
    }
    return fmin(highestSupportedRate, self._targetFps);
}
467
497
468
498
@end
0 commit comments