Bug 1404039 - Add a unittest for VideoConduit getting a signal to reduce quality due to load or bandwidth; r=pehrsons draft
author      Dan Minor <dminor@mozilla.com>
date        Wed, 18 Oct 2017 15:19:21 -0400
changeset   684725  0c9518c0501ca579ed9d948bde63159c21d9657c
parent      684703  d49501f258b105c5e2dcd0a59896ec1ceabf726b
child       736945  7b57e9405c920ef2b5721e098307c2dd1ec11d22
push id     85705
push user   bmo:dminor@mozilla.com
push date   Mon, 23 Oct 2017 13:51:56 +0000
reviewers   pehrsons
bugs        1404039
milestone   58.0a1
Bug 1404039 - Add a unittest for VideoConduit getting a signal to reduce quality due to load or bandwidth; r=pehrsons MozReview-Commit-ID: 5J3wINSnStR
media/webrtc/signaling/gtest/moz.build
media/webrtc/signaling/gtest/videoconduit_unittests.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/trunk/webrtc/media/base/videoadapter.h
--- a/media/webrtc/signaling/gtest/moz.build
+++ b/media/webrtc/signaling/gtest/moz.build
@@ -22,16 +22,17 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'uiki
       '/media/webrtc/trunk',
     ]
 
     SOURCES += [
         'jsep_session_unittest.cpp',
         'jsep_track_unittest.cpp',
         'mediaconduit_unittests.cpp',
         'sdp_unittests.cpp',
+        'videoconduit_unittests.cpp',
     ]
 
     # See Bug 1372950, mediapipeline tests seem to cause crashes on Windows
     if CONFIG['OS_TARGET'] != 'WINNT':
         SOURCES += [
             'mediapipeline_unittest.cpp',
         ]
 
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/gtest/videoconduit_unittests.cpp
@@ -0,0 +1,90 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#define GTEST_HAS_RTTI 0
+#include "gtest/gtest.h"
+
+#include "VideoConduit.h"
+
+#include "webrtc/media/base/videoadapter.h"
+
+using namespace mozilla;
+
+namespace test {
+
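+// Records the arguments of the most recent VideoAdapter calls so the test can
+// verify the resolution limits that WebrtcVideoConduit requests.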
+class MockVideoAdapter : public cricket::VideoAdapter {
+public:
+
+  bool AdaptFrameResolution(int in_width,
+                            int in_height,
+                            int64_t in_timestamp_ns,
+                            int* cropped_width,
+                            int* cropped_height,
+                            int* out_width,
+                            int* out_height) override
+  {
+    mInWidth = in_width;
+    mInHeight = in_height;
+    mInTimestampNs = in_timestamp_ns;
+    return true;
+  }
+
+  void OnResolutionRequest(rtc::Optional<int> max_pixel_count,
+                           rtc::Optional<int> max_pixel_count_step_up) override
+  {
+    mMaxPixelCount = max_pixel_count.value_or(-1);
+    mMaxPixelCountStepUp = max_pixel_count_step_up.value_or(-1);
+  }
+
+  void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by) override
+  {
+    mScaleResolutionBy = scale_resolution_by.value_or(-1.0);
+  }
+
+  int mInWidth;
+  int mInHeight;
+  int64_t mInTimestampNs;
+  int mMaxPixelCount;
+  int mMaxPixelCountStepUp;
+  float mScaleResolutionBy;
+};
+
+class VideoConduitTest : public ::testing::Test {
+};
+
+TEST_F(VideoConduitTest, TestOnSinkWantsChanged)
+{
+  RefPtr<mozilla::WebrtcVideoConduit> videoConduit;
+  MockVideoAdapter* adapter = new MockVideoAdapter;
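+  // The conduit takes ownership of |adapter|; the raw pointer is kept only so
+  // the test can inspect what the conduit requested of the mock.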
+  videoConduit = new WebrtcVideoConduit(WebRtcCallWrapper::Create(),
+                                        UniquePtr<cricket::VideoAdapter>(adapter));
+
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count = rtc::Optional<int>(256000);
+  EncodingConstraints constraints;
+  VideoCodecConfig codecConfig(120, "VP8", constraints);
+
+  codecConfig.mEncodingConstraints.maxFs = 0;
+  videoConduit->ConfigureSendMediaCodec(&codecConfig);
+  videoConduit->OnSinkWantsChanged(wants);
+  ASSERT_EQ(adapter->mMaxPixelCount, 256000);
+
+  codecConfig.mEncodingConstraints.maxFs = 500;
+  videoConduit->ConfigureSendMediaCodec(&codecConfig);
+  videoConduit->OnSinkWantsChanged(wants);
+  ASSERT_EQ(adapter->mMaxPixelCount, 500*16*16); // convert macroblocks to pixels
+
+  codecConfig.mEncodingConstraints.maxFs = 1000;
+  videoConduit->ConfigureSendMediaCodec(&codecConfig);
+  videoConduit->OnSinkWantsChanged(wants);
+  ASSERT_EQ(adapter->mMaxPixelCount, 256000);
+
+  wants.max_pixel_count = rtc::Optional<int>(64000);
+  codecConfig.mEncodingConstraints.maxFs = 500;
+  videoConduit->ConfigureSendMediaCodec(&codecConfig);
+  videoConduit->OnSinkWantsChanged(wants);
+  ASSERT_EQ(adapter->mMaxPixelCount, 64000);
+}
+
+} // End namespace test.
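
The expected mMaxPixelCount values above follow from clamping the sink's
max_pixel_count by max-fs, which is expressed in 16x16-pixel macroblocks with 0
meaning "no limit". A minimal sketch of that relationship, for illustration
only (the helper name is hypothetical):

    #include <algorithm>
    #include <cstdint>

    // The limit the test expects the conduit to hand to the video adapter.
    static int ExpectedMaxPixelCount(uint32_t maxFs, int sinkMaxPixelCount)
    {
      if (maxFs == 0) {
        return sinkMaxPixelCount;                            // no max-fs limit
      }
      int maxFsPixels = static_cast<int>(maxFs) * 16 * 16;   // macroblocks -> pixels
      return std::min(maxFsPixels, sinkMaxPixelCount);       // tighter limit wins
    }

    // e.g. ExpectedMaxPixelCount(500, 256000) == 128000 == 500*16*16
    //      ExpectedMaxPixelCount(500, 64000)  == 64000
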
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -221,29 +221,32 @@ VideoSessionConduit::Create(RefPtr<WebRt
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   NS_ASSERTION(aCall, "missing required parameter: aCall");
   CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
 
   if (!aCall) {
     return nullptr;
   }
 
-  nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall));
+  UniquePtr<cricket::VideoAdapter> videoAdapter(new cricket::VideoAdapter(1));
+  nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall,
+                                    std::move(videoAdapter)));
   if(obj->Init() != kMediaConduitNoError) {
     CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
     return nullptr;
   }
   CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
   return obj.forget();
 }
 
-WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
+WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall,
+                                       UniquePtr<cricket::VideoAdapter>&& aVideoAdapter)
   : mTransportMonitor("WebrtcVideoConduit")
   , mRenderer(nullptr)
-  , mVideoAdapter(1)
+  , mVideoAdapter(std::move(aVideoAdapter))
   , mVideoBroadcaster()
   , mEngineTransmitting(false)
   , mEngineReceiving(false)
   , mCapId(-1)
   , mCodecMutex("VideoConduit codec db")
   , mInReconfig(false)
   , mRecvStream(nullptr)
   , mSendStream(nullptr)
@@ -776,17 +779,17 @@ WebrtcVideoConduit::ConfigureSendMediaCo
   }
 
   // NOTE: the lifetime of this object MUST be less than the lifetime of the Conduit
   mEncoderConfig.SetVideoStreamFactory(
     new rtc::RefCountedObject<WebrtcVideoConduit::VideoStreamFactory>(
       codecConfig->mName, this));
 
   // Always call this to ensure it's reset
-  mVideoAdapter.OnScaleResolutionBy(
+  mVideoAdapter->OnScaleResolutionBy(
     (streamCount >= 1 && codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy > 1.0) ?
     rtc::Optional<float>(codecConfig->mSimulcastEncodings[0].constraints.scaleDownBy) :
     rtc::Optional<float>());
 
   // XXX parse the encoded SPS/PPS data and set spsData/spsLen/ppsData/ppsLen
   mEncoderConfig.SetEncoderSpecificSettings(ConfigureVideoEncoderSettings(codecConfig, this));
   mEncoderConfig.SetResolutionDivisor(1);
 
@@ -1701,18 +1704,18 @@ WebrtcVideoConduit::SelectSendResolution
 
     // Limit resolution to max-fs
     if (mCurSendCodecConfig->mEncodingConstraints.maxFs) {
       // max-fs is in macroblocks, convert to pixels
       int max_fs(mCurSendCodecConfig->mEncodingConstraints.maxFs*(16*16));
       if (max_fs > mLastSinkWanted.max_pixel_count.value_or(max_fs)) {
         max_fs = mLastSinkWanted.max_pixel_count.value_or(max_fs);
       }
-      mVideoAdapter.OnResolutionRequest(rtc::Optional<int>(max_fs),
-                                        rtc::Optional<int>());
+      mVideoAdapter->OnResolutionRequest(rtc::Optional<int>(max_fs),
+                                         rtc::Optional<int>());
     }
   }
 
   // Adapt to getUserMedia resolution changes
   // check if we need to reconfigure the sending resolution.
   // NOTE: mSendingWidth != mLastWidth, because of maxwidth/height/etc above
   bool changed = false;
   if (mSendingWidth != width || mSendingHeight != height) {
@@ -1924,18 +1927,18 @@ WebrtcVideoConduit::OnSinkWantsChanged(
         max_pixel_count = rtc::Optional<int>(max_fs);
       }
 
       if (max_pixel_count_step_up.value_or(max_fs) > max_fs) {
         max_pixel_count_step_up = rtc::Optional<int>(max_fs);
       }
     }
 
-    mVideoAdapter.OnResolutionRequest(max_pixel_count,
-                                      max_pixel_count_step_up);
+    mVideoAdapter->OnResolutionRequest(max_pixel_count,
+                                       max_pixel_count_step_up);
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::SendVideoFrame(webrtc::VideoFrame& frame)
 {
   // XXX Google uses a "timestamp_aligner" to translate timestamps from the
   // camera via TranslateTimestamp(); we should look at doing the same.  This
@@ -1967,17 +1970,17 @@ WebrtcVideoConduit::SendVideoFrame(webrt
     }
 
     int adapted_width;
     int adapted_height;
     int crop_width;
     int crop_height;
     int crop_x;
     int crop_y;
-    if (!mVideoAdapter.AdaptFrameResolution(
+    if (!mVideoAdapter->AdaptFrameResolution(
           frame.width(), frame.height(),
           frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
           &crop_width, &crop_height, &adapted_width, &adapted_height)) {
       // VideoAdapter dropped the frame.
       return kMediaConduitNoError;
     }
     crop_x = (frame.width() - crop_width) / 2;
     crop_y = (frame.height() - crop_height) / 2;
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -281,17 +281,18 @@ public:
   uint8_t TemporalLayers() const {
     return mTemporalLayers;
   }
 
   webrtc::VideoCodecMode CodecMode() const {
     return mCodecMode;
   }
 
-  explicit WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall);
+  WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall,
+                     UniquePtr<cricket::VideoAdapter>&& aVideoAdapter);
   virtual ~WebrtcVideoConduit();
 
   MediaConduitErrorCode InitMain();
   virtual MediaConduitErrorCode Init();
   virtual void Destroy();
 
   std::vector<unsigned int> GetLocalSSRCs() const override;
   bool SetLocalSSRCs(const std::vector<unsigned int> & ssrcs) override;
@@ -489,17 +490,17 @@ private:
 
   mozilla::ReentrantMonitor mTransportMonitor;
   RefPtr<TransportInterface> mTransmitterTransport;
   RefPtr<TransportInterface> mReceiverTransport;
   RefPtr<mozilla::VideoRenderer> mRenderer;
 
   // Frame adapter - handle sinks that we feed data to, and handle resolution
   // changes needed for them.
-  cricket::VideoAdapter mVideoAdapter;
+  UniquePtr<cricket::VideoAdapter> mVideoAdapter;
   rtc::VideoBroadcaster mVideoBroadcaster;
 
   // Engine state we are concerned with.
   mozilla::Atomic<bool> mEngineTransmitting; // If true ==> Transmit Subsystem is up and running
   mozilla::Atomic<bool> mEngineReceiving;    // if true ==> Receive Subsystem up and running
 
   int mCapId;   // Capturer for this conduit
   //Local database of currently applied receive codecs
--- a/media/webrtc/trunk/webrtc/media/base/videoadapter.h
+++ b/media/webrtc/trunk/webrtc/media/base/videoadapter.h
@@ -27,41 +27,41 @@ class VideoAdapter {
   VideoAdapter();
   explicit VideoAdapter(int required_resolution_alignment);
   virtual ~VideoAdapter();
 
   // Return the adapted resolution and cropping parameters given the
   // input resolution. The input frame should first be cropped, then
   // scaled to the final output resolution. Returns true if the frame
   // should be adapted, and false if it should be dropped.
-  bool AdaptFrameResolution(int in_width,
-                            int in_height,
-                            int64_t in_timestamp_ns,
-                            int* cropped_width,
-                            int* cropped_height,
-                            int* out_width,
-                            int* out_height);
+  virtual bool AdaptFrameResolution(int in_width,
+                                    int in_height,
+                                    int64_t in_timestamp_ns,
+                                    int* cropped_width,
+                                    int* cropped_height,
+                                    int* out_width,
+                                    int* out_height);
 
   // Requests the output frame size and frame interval from
   // |AdaptFrameResolution| to not be larger than |format|. Also, the input
   // frame size will be cropped to match the requested aspect ratio. The
   // requested aspect ratio is orientation agnostic and will be adjusted to
   // maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
   // 720x1280 is requested.
   void OnOutputFormatRequest(const VideoFormat& format);
 
   // Requests the output frame size from |AdaptFrameResolution| to not have
   // more than |max_pixel_count| pixels and have "one step" up more pixels than
   // max_pixel_count_step_up.
-  void OnResolutionRequest(rtc::Optional<int> max_pixel_count,
-                           rtc::Optional<int> max_pixel_count_step_up);
+  virtual void OnResolutionRequest(rtc::Optional<int> max_pixel_count,
+                                   rtc::Optional<int> max_pixel_count_step_up);
 
   // Requests the output frame size from |AdaptFrameResolution| be scaled
   // down from the input by a factor of scale_resolution_by (min 1.0)
-  void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by);
+  virtual void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by);
 
  private:
   // Determine if frame should be dropped based on input fps and requested fps.
   bool KeepFrame(int64_t in_timestamp_ns);
 
   int frames_in_;         // Number of input frames.
   int frames_out_;        // Number of output frames.
   int frames_scaled_;     // Number of frames scaled.
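
The comments above describe the adapter contract that WebrtcVideoConduit, and
now MockVideoAdapter, builds on: request a pixel budget, then let
AdaptFrameResolution pick the cropped and scaled output size. A minimal sketch
of driving that contract directly, for illustration only (the wrapper function
is hypothetical):

    #include "webrtc/media/base/videoadapter.h"

    void AdaptOneFrameSketch()
    {
      cricket::VideoAdapter adapter(1);  // required_resolution_alignment
      adapter.OnResolutionRequest(rtc::Optional<int>(256000),  // max_pixel_count
                                  rtc::Optional<int>());       // no step-up request

      int cropped_width, cropped_height, out_width, out_height;
      if (adapter.AdaptFrameResolution(1280, 720, 0 /* in_timestamp_ns */,
                                       &cropped_width, &cropped_height,
                                       &out_width, &out_height)) {
        // out_width * out_height will not exceed the requested 256000 pixels.
      } else {
        // The adapter asked for this frame to be dropped.
      }
    }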