Bug 1273734 - [AVFoundation] Expose different FPS range as discrete capability; r=jib draft
author Munro Mengjue Chiang <mchiang@mozilla.com>
Thu, 03 Nov 2016 16:17:25 +0800
changeset 433209 477e5e09ab7bc1b2b2685fc161f5336df9b7d553
parent 433112 ade8d4a63e57560410de106450f37b50ed71cca5
child 535820 626f5ce156a8479e522c139a6386275ddbe900c9
push id 34503
push user mchiang@mozilla.com
push date Thu, 03 Nov 2016 08:18:08 +0000
reviewers jib
bugs 1273734
milestone 52.0a1
Bug 1273734 - [AVFoundation] Expose different FPS range as discrete capability; r=jib MozReview-Commit-ID: 3k3r87VDhDR
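For reference, the change flattens each (AVCaptureDeviceFormat, AVFrameRateRange) pair of a device into one discrete capability: the capability id packs the format index into the high 16 bits and the frame-rate-range index into the low 16 bits, so getCaptureCapability can recover both indices later. A minimal sketch of that packing, with illustrative helper names (PackCapabilityId / UnpackCapabilityId are not part of the patch, which inlines the arithmetic):

    // Illustrative helpers mirroring the arithmetic used in
    // getCaptureCapabilityCount (encode) and getCaptureCapability (decode).
    static inline int PackCapabilityId(int formatIndex, int frameRateIndex) {
        // High 16 bits: index into [captureDevice formats].
        // Low 16 bits: index into format.videoSupportedFrameRateRanges.
        return (formatIndex << 16) + (frameRateIndex & 0xffff);
    }

    static inline void UnpackCapabilityId(int capabilityId,
                                          int* formatIndex,
                                          int* frameRateIndex) {
        *formatIndex = capabilityId >> 16;
        *frameRateIndex = capabilityId & 0xffff;
    }

The 16/16 split allows 65536 formats per device and 65536 frame-rate ranges per format, far more than AVFoundation reports in practice.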
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.mm
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.h
@@ -24,16 +24,17 @@
 
 @interface VideoCaptureMacAVFoundationInfoObjC : NSObject{
     bool                                _OSSupportedInfo;
     NSArray*                            _captureDevicesInfo;
     int                                    _captureDeviceCountInfo;
     NSArray*                            _observers;
     NSLock*                             _lock;
     webrtc::videocapturemodule::VideoCaptureMacAVFoundationInfo* _owner;
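+    // Maps device index -> NSMutableArray of packed (format, frame-rate range)
+    // capability ids; built by getCaptureCapabilityCount, consumed by
+    // getCaptureCapability, and released in dealloc.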
+    NSMutableDictionary*                _capabilityMaps;
 
 }
 
 /**************************************************************************
  *
  *   The following functions are considered to be private
  *
  ***************************************************************************/
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/avfoundation/video_capture_avfoundation_info_objc.mm
@@ -53,16 +53,24 @@ using namespace videocapturemodule;
 
     // Remove Observers
     NSNotificationCenter* notificationCenter = [NSNotificationCenter defaultCenter];
     for (id observer in _observers)
         [notificationCenter removeObserver:observer];
     [_observers release];
     [_lock release];
 
+    // The dictionary's values are the NSMutableArray capability maps; fast
+    // enumeration over an NSDictionary yields keys, so iterate allValues here.
+    for (NSMutableArray* capabilityMap in [_capabilityMaps allValues]) {
+        [capabilityMap removeAllObjects];
+        [capabilityMap release];
+    }
+
+    [_capabilityMaps removeAllObjects];
+    [_capabilityMaps release];
+
     [super dealloc];
 }
 
 // ****************** public methods ******************
 #pragma mark **** public method implementations
 
 /// ***** Creates a message box with Cocoa framework
 /// ***** Returns 0 on success, -1 otherwise.
@@ -92,89 +100,121 @@ using namespace videocapturemodule;
 
     AVCaptureDevice* captureDevice = nil;
     if (uniqueId == nil || !strcmp("", uniqueId)) {
         WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
             "Incorrect capture id argument");
         return [NSNumber numberWithInt:-1];
     }
 
-    for (int index = 0; index < _captureDeviceCountInfo; index++) {
-        captureDevice = (AVCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    int deviceIndex;
+
+    for (deviceIndex = 0; deviceIndex < _captureDeviceCountInfo; deviceIndex++) {
+        captureDevice = (AVCaptureDevice*)[_captureDevicesInfo objectAtIndex:deviceIndex];
         char captureDeviceId[1024] = "";
         [[captureDevice uniqueID] getCString:captureDeviceId
                                    maxLength:1024
                                     encoding:NSUTF8StringEncoding];
         if (strcmp(uniqueId, captureDeviceId) == 0) {
             WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
                 "%s:%d Found capture device id %s as index %d",
-                __FUNCTION__, __LINE__, captureDeviceId, index);
+                __FUNCTION__, __LINE__, captureDeviceId, deviceIndex);
             break;
         }
         captureDevice = nil;
     }
 
     if (!captureDevice)
         return [NSNumber numberWithInt:-1];
 
-    return [NSNumber numberWithInt:[captureDevice formats].count];
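+    // Rebuild this device's capability map from scratch on every count query so
+    // it always reflects the device's current list of formats and frame rates.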
+    NSMutableArray* capabilityMap = (NSMutableArray*)[_capabilityMaps objectForKey:[NSNumber numberWithInt:deviceIndex]];
+
+    if (capabilityMap != nil) {
+        [capabilityMap removeAllObjects];
+    } else {
+        capabilityMap = [[NSMutableArray alloc] init];
+        [_capabilityMaps setObject:capabilityMap forKey:[NSNumber numberWithInt:deviceIndex]];
+    }
+
+    int count = 0;
+
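+    // Flatten every (format, frame-rate range) pair into one discrete
+    // capability: each entry packs the format index into the high 16 bits and
+    // the frame-rate-range index into the low 16 bits.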
+    for (int formatIndex = 0; formatIndex < (int)[captureDevice formats].count; formatIndex++) {
+        AVCaptureDeviceFormat* format =
+            (AVCaptureDeviceFormat*) [[captureDevice formats] objectAtIndex:formatIndex];
+
+        count += format.videoSupportedFrameRateRanges.count;
+        for (int frameRateIndex = 0;
+                frameRateIndex < (int) format.videoSupportedFrameRateRanges.count;
+                frameRateIndex++) {
+            [capabilityMap addObject: [NSNumber numberWithInt:((formatIndex << 16) + (frameRateIndex & 0xffff))]];
+        }
+    }
+
+    return [NSNumber numberWithInt:count];
 }
 
 - (NSNumber*)getCaptureCapability:(const char*)uniqueId
                      CapabilityId:(uint32_t)capabilityId
                  Capability_width:(int32_t*)width
                 Capability_height:(int32_t*)height
                 Capability_maxFPS:(int32_t*)maxFPS
                 Capability_format:(webrtc::RawVideoType*)rawType
 {
     AVCaptureDevice* captureDevice = nil;
     if (uniqueId == nil || !strcmp("", uniqueId)) {
         WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
             "Incorrect capture id argument");
         return [NSNumber numberWithInt:-1];
     }
 
-    for (int index = 0; index < _captureDeviceCountInfo; index++) {
-        captureDevice = (AVCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    int deviceIndex;
+
+    for (deviceIndex = 0; deviceIndex < _captureDeviceCountInfo; deviceIndex++) {
+        captureDevice = (AVCaptureDevice*)[_captureDevicesInfo objectAtIndex:deviceIndex];
         char captureDeviceId[1024] = "";
         [[captureDevice uniqueID] getCString:captureDeviceId
                                    maxLength:1024
                                     encoding:NSUTF8StringEncoding];
         if (strcmp(uniqueId, captureDeviceId) == 0) {
             WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
                 "%s:%d Found capture device id %s as index %d",
-                __FUNCTION__, __LINE__, captureDeviceId, index);
+                __FUNCTION__, __LINE__, captureDeviceId, deviceIndex);
             break;
         }
         captureDevice = nil;
     }
 
     if (!captureDevice)
         return [NSNumber numberWithInt:-1];
 
-    AVCaptureDeviceFormat* format = (AVCaptureDeviceFormat*)[[captureDevice formats]objectAtIndex:capabilityId];
-    CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-    AVFrameRateRange* maxFrameRateRange = nil;
+    NSMutableArray* capabilityMap = [_capabilityMaps objectForKey:[NSNumber numberWithInt:deviceIndex]];
+
+    // Protect against an unknown device or an out-of-range capabilityId;
+    // objectAtIndex: would throw NSRangeException rather than return nil.
+    if (capabilityMap == nil || capabilityId >= [capabilityMap count])
+        return [NSNumber numberWithInt:-1];
+
+    NSNumber* indexNumber = [capabilityMap objectAtIndex:capabilityId];
 
-    for ( AVFrameRateRange* range in format.videoSupportedFrameRateRanges ) {
-        if ( range.maxFrameRate > maxFrameRateRange.maxFrameRate ) {
-            maxFrameRateRange = range;
-        }
-    }
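+    // Decode the packed entry: high 16 bits hold the format index, low 16 bits
+    // hold the frame-rate-range index (see getCaptureCapabilityCount).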
+    int indexInt = static_cast<int>([indexNumber integerValue]);
+    int formatIndex = indexInt >> 16;
+    int frameRateIndex = indexInt & 0xffff;
+
+    AVCaptureDeviceFormat* format = (AVCaptureDeviceFormat*)[[captureDevice formats]objectAtIndex:formatIndex];
+    CMVideoDimensions videoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    AVFrameRateRange* frameRateRange = [format.videoSupportedFrameRateRanges objectAtIndex:frameRateIndex];
 
     *width = videoDimensions.width;
     *height = videoDimensions.height;
 
     // This is to fix setCaptureHeight(), which fails for some webcams supporting non-integer framerates.
     // In setCaptureHeight(), we match the best framerate range by searching for a range whose max framerate
     // is closest to (but smaller than or equal to) the target. Since the capability maxFPS is an integer,
     // we would fill in the capability maxFPS with the floor value (e.g., 29) of the real supported fps
     // (e.g., 29.97). If the target is then set to 29, we fail to match the best range with max framerate
     // 29.97 since it is over the target. Therefore, we need to return a ceiling value as the maxFPS here.
-    *maxFPS = static_cast<int32_t>(ceil(maxFrameRateRange.maxFrameRate));
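+    // Worked example: a 29.97 fps range floored to 29 can never satisfy a
+    // target of 29 (29.97 > 29), but reported as ceil(29.97) = 30 it is
+    // selected when the caller passes 30 as the target.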
+    *maxFPS = static_cast<int32_t>(ceil(frameRateRange.maxFrameRate));
     *rawType = [VideoCaptureMacAVFoundationUtility fourCCToRawVideoType:CMFormatDescriptionGetMediaSubType(format.formatDescription)];
 
     return [NSNumber numberWithInt:0];
 }
 
 - (NSNumber*)getDeviceNamesFromIndex:(uint32_t)index
     DefaultName:(char*)deviceName
     WithLength:(uint32_t)deviceNameLength
@@ -266,16 +306,18 @@ using namespace videocapturemodule;
             [_lock lock];
             if(_owner)
                 _owner->DeviceChange();
             [_lock unlock];
         }];
 
     _observers = [[NSArray alloc] initWithObjects:deviceWasConnectedObserver, deviceWasDisconnectedObserver, nil];
 
+    _capabilityMaps = [[NSMutableDictionary alloc] init];
+
     return [NSNumber numberWithInt:0];
 }
 
 // ***** Checks to see if the AVCaptureSession framework is available in the OS
 // ***** If it is not, isOSSupported = NO
 // ***** Throughout the rest of the class, isOSSupported is checked and functions
 // ***** are/aren't called depending
 // ***** The user can use weak linking to the AVFoundation framework and run on older