Bug 1388219 - down scale camera output frame to the target capability. r=jib draft
authorMunro Mengjue Chiang <mchiang@mozilla.com>
Fri, 01 Dec 2017 10:12:51 +0800
changeset 706074 d29809c0b898648b535afdd2c9e3e97a297d99e0
parent 706073 5cab9cdb5cc1a67d6cf4c0b5c5c7caef5cfe7ea0
child 742560 eb61ea1e2d22bd61592079d77927caa657545918
push id91690
push userbmo:mchiang@mozilla.com
push dateFri, 01 Dec 2017 07:16:16 +0000
reviewersjib
bugs1388219
milestone59.0a1
Bug 1388219 - down scale camera output frame to the target capability. r=jib MozReview-Commit-ID: 7dlbWXndbgf
dom/media/systemservices/CamerasParent.cpp
dom/media/webrtc/MediaEngineCameraVideoSource.cpp
dom/media/webrtc/MediaEngineCameraVideoSource.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
media/webrtc/trunk/webrtc/video_engine/desktop_capture_impl.cc
--- a/dom/media/systemservices/CamerasParent.cpp
+++ b/dom/media/systemservices/CamerasParent.cpp
@@ -47,17 +47,17 @@ ResolutionFeasibilityDistance(int32_t ca
 {
   // The purpose of this function is to find a smallest resolution
   // which is larger than all requested capabilities.
   // Then we can use down-scaling to fulfill each request.
   uint32_t distance;
   if (candidate >= requested) {
     distance = (candidate - requested) * 1000 / std::max(candidate, requested);
   } else {
-    distance = (UINT32_MAX / 2) + (requested - candidate) *
+    distance = 10000 + (requested - candidate) *
       1000 / std::max(candidate, requested);
   }
   return distance;
 }
 
 uint32_t
 FeasibilityDistance(int32_t candidate, int32_t requested)
 {
@@ -857,24 +857,24 @@ CamerasParent::RecvStartCapture(const Ca
           capability.width = ipcCaps.width();
           capability.height = ipcCaps.height();
           capability.maxFPS = ipcCaps.maxFPS();
           capability.expectedCaptureDelay = ipcCaps.expectedCaptureDelay();
           capability.rawType = static_cast<webrtc::RawVideoType>(ipcCaps.rawType());
           capability.codecType = static_cast<webrtc::VideoCodecType>(ipcCaps.codecType());
           capability.interlaced = ipcCaps.interlaced();
 
+#ifdef DEBUG
+          auto deviceUniqueID = sDeviceUniqueIDs.find(capnum);
+          MOZ_ASSERT(deviceUniqueID == sDeviceUniqueIDs.end());
+#endif
+          sDeviceUniqueIDs.emplace(capnum, cap.VideoCapture()->CurrentDeviceName());
+          sAllRequestedCapabilities.emplace(capnum, capability);
+
           if (aCapEngine == CameraEngine) {
-#ifdef DEBUG
-            auto deviceUniqueID = sDeviceUniqueIDs.find(capnum);
-            MOZ_ASSERT(deviceUniqueID == sDeviceUniqueIDs.end());
-#endif
-            sDeviceUniqueIDs.emplace(capnum, cap.VideoCapture()->CurrentDeviceName());
-            sAllRequestedCapabilities.emplace(capnum, capability);
-
             for (const auto &it : sDeviceUniqueIDs) {
               if (strcmp(it.second, cap.VideoCapture()->CurrentDeviceName()) == 0) {
                 capability.width = std::max(
                   capability.width, sAllRequestedCapabilities[it.first].width);
                 capability.height = std::max(
                   capability.height, sAllRequestedCapabilities[it.first].height);
                 capability.maxFPS = std::max(
                   capability.maxFPS, sAllRequestedCapabilities[it.first].maxFPS);
@@ -903,16 +903,26 @@ CamerasParent::RecvStartCapture(const Ca
                   candidateCapability.second.maxFPS, capability.maxFPS));
               if (distance < minDistance) {
                 minIdx = candidateCapability.first;;
                 minDistance = distance;
               }
             }
             MOZ_ASSERT(minIdx != -1);
             capability = candidateCapabilities->second[minIdx];
+          } else if (aCapEngine == ScreenEngine ||
+                     aCapEngine == BrowserEngine ||
+                     aCapEngine == WinEngine ||
+                     aCapEngine == AppEngine) {
+            for (const auto &it : sDeviceUniqueIDs) {
+              if (strcmp(it.second, cap.VideoCapture()->CurrentDeviceName()) == 0) {
+                capability.maxFPS = std::max(
+                  capability.maxFPS, sAllRequestedCapabilities[it.first].maxFPS);
+              }
+            }
           }
 
           error = cap.VideoCapture()->StartCapture(capability);
 
           if (!error) {
             cap.VideoCapture()->RegisterCaptureDataCallback(
               static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(*cbh));
           }
@@ -944,26 +954,24 @@ CamerasParent::StopCapture(const Capture
 {
   if (auto engine = EnsureInitialized(aCapEngine)) {
     // we're removing elements, iterate backwards
     for (size_t i = mCallbacks.Length(); i > 0; i--) {
       if (mCallbacks[i - 1]->mCapEngine == aCapEngine &&
           mCallbacks[i - 1]->mStreamId == (uint32_t)capnum) {
 
         CallbackHelper* cbh = mCallbacks[i-1];
-        engine->WithEntry(capnum,[cbh, &capnum, &aCapEngine](VideoEngine::CaptureEntry& cap){
+        engine->WithEntry(capnum,[cbh, &capnum](VideoEngine::CaptureEntry& cap){
           if (cap.VideoCapture()) {
             cap.VideoCapture()->DeRegisterCaptureDataCallback(
               static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(cbh));
             cap.VideoCapture()->StopCaptureIfAllClientsClose();
 
-            if (aCapEngine == CameraEngine) {
-              sDeviceUniqueIDs.erase(capnum);
-              sAllRequestedCapabilities.erase(capnum);
-            }
+            sDeviceUniqueIDs.erase(capnum);
+            sAllRequestedCapabilities.erase(capnum);
           }
         });
 
         delete mCallbacks[i - 1];
         mCallbacks.RemoveElementAt(i - 1);
         break;
       }
     }
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
@@ -23,17 +23,18 @@ bool MediaEngineCameraVideoSource::Appen
                                                  TrackID aID,
                                                  StreamTime delta,
                                                  const PrincipalHandle& aPrincipalHandle)
 {
   MOZ_ASSERT(aSource);
 
   VideoSegment segment;
   RefPtr<layers::Image> image = aImage;
-  IntSize size(image ? mWidth : 0, image ? mHeight : 0);
+  IntSize size = image ? image->GetSize() : IntSize(0, 0);
+
   segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
 
   // This is safe from any thread, and is safe if the track is Finished
   // or Destroyed.
   // This can fail if either a) we haven't added the track yet, or b)
   // we've removed or finished the track.
   return aSource->AppendToTrack(aID, &(segment));
 }
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.h
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.h
@@ -135,16 +135,17 @@ protected:
   // mMonitor also protects mSources[] and mPrincipalHandles[] access/changes.
   // mSources[] and mPrincipalHandles[] are accessed from webrtc threads.
 
   // All the mMonitor accesses are from the child classes.
   Monitor mMonitor; // Monitor for processing Camera frames.
   nsTArray<RefPtr<SourceMediaStream>> mSources; // When this goes empty, we shut down HW
   nsTArray<PrincipalHandle> mPrincipalHandles; // Directly mapped to mSources.
   RefPtr<layers::Image> mImage;
+  nsTArray<RefPtr<layers::Image>> mImages;
   nsTArray<webrtc::CaptureCapability> mTargetCapabilities;
   nsTArray<uint64_t> mHandleIds;
   RefPtr<layers::ImageContainer> mImageContainer;
   // end of data protected by mMonitor
 
   int mWidth, mHeight;
   bool mInitDone;
   int mCaptureIndex;
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -5,16 +5,19 @@
 
 #include "MediaEngineRemoteVideoSource.h"
 
 #include "mozilla/RefPtr.h"
 #include "VideoUtils.h"
 #include "nsIPrefService.h"
 #include "MediaTrackConstraints.h"
 #include "CamerasChild.h"
+#include "VideoFrameUtils.h"
+#include "webrtc/api/video/i420_buffer.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
 extern mozilla::LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
 
 namespace mozilla {
 
 uint64_t MediaEngineCameraVideoSource::AllocationHandle::sId = 0;
@@ -79,16 +82,17 @@ MediaEngineRemoteVideoSource::Shutdown()
     while (1) {
       {
         MonitorAutoLock lock(mMonitor);
         empty = mSources.IsEmpty();
         if (empty) {
           MOZ_ASSERT(mPrincipalHandles.IsEmpty());
           MOZ_ASSERT(mTargetCapabilities.IsEmpty());
           MOZ_ASSERT(mHandleIds.IsEmpty());
+          MOZ_ASSERT(mImages.IsEmpty());
           break;
         }
         source = mSources[0];
       }
       Stop(source, kVideoTrack); // XXX change to support multiple tracks
     }
     MOZ_ASSERT(mState == kStopped);
   }
@@ -127,16 +131,17 @@ MediaEngineRemoteVideoSource::Allocate(
   }
   if (mState == kStarted &&
       MOZ_LOG_TEST(GetMediaManagerLog(), mozilla::LogLevel::Debug)) {
     MonitorAutoLock lock(mMonitor);
     if (mSources.IsEmpty()) {
       MOZ_ASSERT(mPrincipalHandles.IsEmpty());
       MOZ_ASSERT(mTargetCapabilities.IsEmpty());
       MOZ_ASSERT(mHandleIds.IsEmpty());
+      MOZ_ASSERT(mImages.IsEmpty());
       LOG(("Video device %d reallocated", mCaptureIndex));
     } else {
       LOG(("Video device %d allocated shared", mCaptureIndex));
     }
   }
   return NS_OK;
 }
 
@@ -169,35 +174,38 @@ MediaEngineRemoteVideoSource::Start(Sour
 {
   LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
   if (!mInitDone || !aStream) {
     LOG(("No stream or init not done"));
     return NS_ERROR_FAILURE;
   }
 
+  mImageContainer =
+    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
+
   {
     MonitorAutoLock lock(mMonitor);
     mSources.AppendElement(aStream);
     mPrincipalHandles.AppendElement(aPrincipalHandle);
     mTargetCapabilities.AppendElement(mTargetCapability);
     mHandleIds.AppendElement(mHandleId);
+    mImages.AppendElement(mImageContainer->CreatePlanarYCbCrImage());
 
     MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
     MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
     MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+    MOZ_ASSERT(mSources.Length() == mImages.Length());
   }
 
   aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
 
   if (mState == kStarted) {
     return NS_OK;
   }
-  mImageContainer =
-    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
 
   mState = kStarted;
   mTrackID = aID;
 
   if (mozilla::camera::GetChildAndCall(
     &mozilla::camera::CamerasChild::StartCapture,
     mCapEngine, mCaptureIndex, mCapability, this)) {
     LOG(("StartCapture failed"));
@@ -226,20 +234,22 @@ MediaEngineRemoteVideoSource::Stop(mozil
     if (i == mSources.NoIndex) {
       // Already stopped - this is allowed
       return NS_OK;
     }
 
     MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
     MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
     MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+    MOZ_ASSERT(mSources.Length() == mImages.Length());
     mSources.RemoveElementAt(i);
     mPrincipalHandles.RemoveElementAt(i);
     mTargetCapabilities.RemoveElementAt(i);
     mHandleIds.RemoveElementAt(i);
+    mImages.RemoveElementAt(i);
 
     aSource->EndTrack(aID);
 
     if (!mSources.IsEmpty()) {
       return NS_OK;
     }
     if (mState != kStarted) {
       return NS_ERROR_FAILURE;
@@ -313,17 +323,22 @@ MediaEngineRemoteVideoSource::UpdateSing
 
         if (!ChooseCapability(aNewConstraint, aPrefs, aDeviceId, mTargetCapability,
                               kFitness)) {
           *aOutBadConstraint = FindBadConstraint(aNewConstraint, *this, aDeviceId);
           return NS_ERROR_FAILURE;
         }
 
         if (index != mHandleIds.NoIndex) {
+          MonitorAutoLock lock(mMonitor);
           mTargetCapabilities[index] = mTargetCapability;
+          MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+          MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
+          MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+          MOZ_ASSERT(mSources.Length() == mImages.Length());
         }
 
         if (!ChooseCapability(aNetConstraints, aPrefs, aDeviceId, mCapability,
                               kFeasibility)) {
           *aOutBadConstraint = FindBadConstraint(aNetConstraints, *this, aDeviceId);
           return NS_ERROR_FAILURE;
         }
 
@@ -380,28 +395,32 @@ MediaEngineRemoteVideoSource::SetLastCap
 }
 
 void
 MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream* aSource,
                                          TrackID aID, StreamTime aDesiredTime,
                                          const PrincipalHandle& aPrincipalHandle)
 {
-  VideoSegment segment;
-
+  StreamTime delta = 0;
+  size_t i;
   MonitorAutoLock lock(mMonitor);
   if (mState != kStarted) {
     return;
   }
 
-  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
+  i = mSources.IndexOf(aSource);
+  if (i == mSources.NoIndex) {
+    return;
+  }
+
+  delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
 
   if (delta > 0) {
-    // nullptr images are allowed
-    AppendToTrack(aSource, mImage, aID, delta, aPrincipalHandle);
+    AppendToTrack(aSource, mImages[i], aID, delta, aPrincipalHandle);
   }
 }
 
 void
 MediaEngineRemoteVideoSource::FrameSizeChange(unsigned int w, unsigned int h)
 {
   if ((mWidth < 0) || (mHeight < 0) ||
       (w !=  (unsigned int) mWidth) || (h != (unsigned int) mHeight)) {
@@ -414,73 +433,137 @@ MediaEngineRemoteVideoSource::FrameSizeC
       settings->mWidth.Value() = w;
       settings->mHeight.Value() = h;
       return NS_OK;
     }));
   }
 }
 
 int
-MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer ,
+MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer,
                                     const camera::VideoFrameProperties& aProps)
 {
+  MonitorAutoLock lock(mMonitor);
   // Check for proper state.
-  if (mState != kStarted) {
+  if (mState != kStarted || !mImageContainer) {
     LOG(("DeliverFrame: video not started"));
     return 0;
   }
 
   // Update the dimensions
   FrameSizeChange(aProps.width(), aProps.height());
 
-  layers::PlanarYCbCrData data;
-  RefPtr<layers::PlanarYCbCrImage> image;
-  {
-    // We grab the lock twice, but don't hold it across the (long) CopyData
-    MonitorAutoLock lock(mMonitor);
-    if (!mImageContainer) {
-      LOG(("DeliverFrame() called after Stop()!"));
-      return 0;
+  MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+  MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
+  MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
+  MOZ_ASSERT(mSources.Length() == mImages.Length());
+
+  for (uint32_t i = 0; i < mTargetCapabilities.Length(); i++ ) {
+    int32_t req_max_width = mTargetCapabilities[i].width & 0xffff;
+    int32_t req_max_height = mTargetCapabilities[i].height & 0xffff;
+    int32_t req_ideal_width = (mTargetCapabilities[i].width >> 16) & 0xffff;
+    int32_t req_ideal_height = (mTargetCapabilities[i].height >> 16) & 0xffff;
+
+    int32_t dest_max_width = std::min(req_max_width, mWidth);
+    int32_t dest_max_height = std::min(req_max_height, mHeight);
+    // This logic works for both the camera and screen-sharing cases.
+    // For the camera case, req_ideal_width and req_ideal_height are 0,
+    // so dst_width/dst_height fall back to dest_max_width/dest_max_height.
+    int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : mWidth, dest_max_width);
+    int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : mHeight, dest_max_height);
+
+    int dst_stride_y = dst_width;
+    int dst_stride_uv = (dst_width + 1) / 2;
+
+    camera::VideoFrameProperties properties;
+    uint8_t* frame;
+    bool needReScale = !((dst_width == mWidth && dst_height == mHeight) ||
+                         (dst_width > mWidth || dst_height > mHeight));
+
+    if (!needReScale) {
+      dst_width = mWidth;
+      dst_height = mHeight;
+      frame = aBuffer;
+    } else {
+      rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
+      i420Buffer = webrtc::I420Buffer::Create(mWidth, mHeight, mWidth,
+                                              (mWidth + 1) / 2, (mWidth + 1) / 2);
+
+      const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
+                                                         aBuffer,
+                                                         0, 0,  // No cropping
+                                                         mWidth, mHeight,
+                                                         mWidth * mHeight * 3 / 2,
+                                                         webrtc::kVideoRotation_0,
+                                                         i420Buffer.get());
+
+      webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
+      if (conversionResult < 0) {
+        return 0;
+      }
+
+      rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
+      scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
+                                                dst_stride_uv, dst_stride_uv);
+
+      scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
+      webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
+
+      VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
+      frame = new (std::nothrow) unsigned char[properties.bufferSize()];
+
+      if (!frame) {
+        return 0;
+      }
+
+      VideoFrameUtils::CopyVideoFrameBuffers(frame,
+                                             properties.bufferSize(), scaledFrame);
     }
+
     // Create a video frame and append it to the track.
-    image = mImageContainer->CreatePlanarYCbCrImage();
+    RefPtr<layers::PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
 
-    uint8_t* frame = static_cast<uint8_t*> (aBuffer);
     const uint8_t lumaBpp = 8;
     const uint8_t chromaBpp = 4;
 
+    layers::PlanarYCbCrData data;
+
     // Take lots of care to round up!
     data.mYChannel = frame;
-    data.mYSize = IntSize(mWidth, mHeight);
-    data.mYStride = (mWidth * lumaBpp + 7)/ 8;
-    data.mCbCrStride = (mWidth * chromaBpp + 7) / 8;
-    data.mCbChannel = frame + mHeight * data.mYStride;
-    data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
-    data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
+    data.mYSize = IntSize(dst_width, dst_height);
+    data.mYStride = (dst_width * lumaBpp + 7) / 8;
+    data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
+    data.mCbChannel = frame + dst_height * data.mYStride;
+    data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
+    data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
     data.mPicX = 0;
     data.mPicY = 0;
-    data.mPicSize = IntSize(mWidth, mHeight);
+    data.mPicSize = IntSize(dst_width, dst_height);
     data.mStereoMode = StereoMode::MONO;
-  }
+
+    if (!image->CopyData(data)) {
+      MOZ_ASSERT(false);
+      if (needReScale) { delete[] frame; } return 0;
+    }
 
-  if (!image->CopyData(data)) {
-    MOZ_ASSERT(false);
-    return 0;
-  }
+    if (needReScale && frame) {
+      delete[] frame;
+      frame = nullptr;
+    }
 
-  MonitorAutoLock lock(mMonitor);
 #ifdef DEBUG
-  static uint32_t frame_num = 0;
-  LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
-            frame_num++, mWidth, mHeight,
-            aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
+    static uint32_t frame_num = 0;
+    LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
+              frame_num++, mWidth, mHeight,
+              aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
 #endif
 
-  // implicitly releases last image
-  mImage = image.forget();
+    // implicitly releases last image
+    mImages[i] = image.forget();
+  }
 
   // We'll push the frame into the MSG on the next NotifyPull. This will avoid
   // swamping the MSG with frames should it be taking longer than normal to run
   // an iteration.
 
   return 0;
 }
 
--- a/media/webrtc/trunk/webrtc/video_engine/desktop_capture_impl.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/desktop_capture_impl.cc
@@ -577,55 +577,17 @@ int32_t DesktopCaptureImpl::IncomingFram
     webrtc::VideoFrame captureFrame(buffer, 0, 0, kVideoRotation_0);
     if (conversionResult < 0) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                    "Failed to convert capture frame from type %d to I420",
                    frameInfo.rawType);
       return -1;
     }
 
-    int32_t req_max_width = _requestedCapability.width & 0xffff;
-    int32_t req_max_height = _requestedCapability.height & 0xffff;
-    int32_t req_ideal_width = (_requestedCapability.width >> 16) & 0xffff;
-    int32_t req_ideal_height = (_requestedCapability.height >> 16) & 0xffff;
-
-    int32_t dest_max_width = std::min(req_max_width, target_width);
-    int32_t dest_max_height = std::min(req_max_height, target_height);
-    int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : target_width, dest_max_width);
-    int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : target_height, dest_max_height);
-
-    // scale to average of portrait and landscape
-    float scale_width = (float)dst_width / (float)target_width;
-    float scale_height = (float)dst_height / (float)target_height;
-    float scale = (scale_width + scale_height) / 2;
-    dst_width = (int)(scale * target_width);
-    dst_height = (int)(scale * target_height);
-
-    // if scaled rectangle exceeds max rectangle, scale to minimum of portrait and landscape
-    if (dst_width > dest_max_width || dst_height > dest_max_height) {
-      scale_width = (float)dest_max_width / (float)dst_width;
-      scale_height = (float)dest_max_height / (float)dst_height;
-      scale = std::min(scale_width, scale_height);
-      dst_width = (int)(scale * dst_width);
-      dst_height = (int)(scale * dst_height);
-    }
-
-    int dst_stride_y = dst_width;
-    int dst_stride_uv = (dst_width + 1) / 2;
-    if (dst_width == target_width && dst_height == target_height) {
-      DeliverCapturedFrame(captureFrame, captureTime);
-    } else {
-      rtc::scoped_refptr<webrtc::I420Buffer> buffer;
-      buffer = I420Buffer::Create(dst_width, dst_height, dst_stride_y,
-                                  dst_stride_uv, dst_stride_uv);
-
-      buffer->ScaleFrom(*captureFrame.video_frame_buffer().get());
-      webrtc::VideoFrame scaledFrame(buffer, 0, 0, kVideoRotation_0);
-      DeliverCapturedFrame(scaledFrame, captureTime);
-    }
+    DeliverCapturedFrame(captureFrame, captureTime);
   } else {
     assert(false);
     return -1;
   }
 
   const int64_t processTime =
     (rtc::TimeNanos() - startProcessTime)/rtc::kNumNanosecsPerMillisec;