Bug 1339134 - stop oscillation of droppedFrames stat; r?jib draft
author      Nico Grunbaum
date        Mon, 13 Feb 2017 21:52:40 -0800
changeset   483477 743c8220b84c99a79cb55d6483d6430764072be1
parent      483394 dd3f337f0d9c2146c341a83a589cedbce13caee0
child       545651 26ac1662bea250d23361bff5f1630a258864b872
push id     45324
push user   na-g@nostrum.com
push date   Tue, 14 Feb 2017 10:57:30 +0000
reviewers   jib
bugs        1339134
milestone   54.0a1
Bug 1339134 - stop oscillation of droppedFrames stat; r?jib

WIP - Still trying to track down where the bulk of the frame drops are occurring.

MozReview-Commit-ID: 7Gn0L0WQ8wB
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/trunk/webrtc/modules/video_coding/include/video_coding_defines.h
media/webrtc/trunk/webrtc/modules/video_coding/video_sender.cc
media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc
media/webrtc/trunk/webrtc/video/send_statistics_proxy.h
media/webrtc/trunk/webrtc/video/video_capture_input.cc
media/webrtc/trunk/webrtc/video/vie_encoder.cc
media/webrtc/trunk/webrtc/video_send_stream.h
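
Reviewer context, not part of the patch: the old droppedFrames value is derived by subtracting the encoder-reported frame counts from the conduit's own mSentFrames counter. The two counters advance asynchronously, so the difference can shrink between stats polls as the encoder catches up, and the reported stat oscillates instead of growing monotonically. Counting drops where they happen, via the FrameDroppedObserver added below, avoids the subtraction. The standalone C++ sketch that follows only illustrates the effect; the numbers and the small harness are invented.

    // oscillation_sketch.cpp - illustrative only, not part of this patch.
    #include <cstdint>
    #include <cstdio>

    int main() {
      // Conduit-side counter: frames handed to the encoder so far.
      uint32_t sent_frames = 100;
      // Encoder-side frame counts arrive asynchronously via GetStats().
      uint32_t encoded_at_poll_1 = 90;  // encoder still catching up
      uint32_t encoded_at_poll_2 = 98;  // encoder caught up

      // Old derivation: dropped = sent - encoded. The stat falls from 10 to 2
      // between polls even though no drop was ever "undone".
      std::printf("derived dropped: %u then %u\n",
                  sent_frames - encoded_at_poll_1,
                  sent_frames - encoded_at_poll_2);

      // New approach: bump a counter at each drop site (OnFrameDropped);
      // the value only grows, so it cannot oscillate.
      uint32_t dropped = 0;
      ++dropped;  // e.g. encoder paused
      ++dropped;  // e.g. media optimizer dropped the frame
      std::printf("counted dropped: %u\n", dropped);
      return 0;
    }
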
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -123,16 +123,17 @@ WebrtcVideoConduit::SendStreamStatistics
   StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps);
   if (!aStats.substreams.empty()) {
     const webrtc::FrameCounts& fc =
       aStats.substreams.begin()->second.frame_counts;
     CSFLogVerbose(logTag, "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
                   __FUNCTION__, aStats.encode_frame_rate, aStats.media_bitrate_bps,
                   (mSentFrames - (fc.key_frames + fc.delta_frames)) - mDroppedFrames);
     mDroppedFrames = mSentFrames - (fc.key_frames + fc.delta_frames);
+    printf("@@NG mDroppedFrames=%d vs. VideoSendStream.stats.dropped_frames=%d\n", static_cast<int>(mDroppedFrames), static_cast<int>(aStats.dropped_frames));
   } else {
     CSFLogVerbose(logTag, "%s aStats.substreams is empty", __FUNCTION__);
   }
 };
 
 void
 WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets(
   uint32_t& aOutDiscPackets) const
@@ -1605,16 +1606,17 @@ WebrtcVideoConduit::ReconfigureSendCodec
     if (!mSendStream->ReconfigureVideoEncoder(mEncoderConfig.GenerateConfig())) {
       CSFLogError(logTag, "%s: ReconfigureVideoEncoder failed", __FUNCTION__);
       return NS_ERROR_FAILURE;
     }
 
     if (frame) {
       // XXX I really don't like doing this from MainThread...
       mSendStream->Input()->IncomingCapturedFrame(*frame);
+      mSendStreamStats.SentFrame();
       CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
     }
   }
   return NS_OK;
 }
 
 unsigned int
 WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
--- a/media/webrtc/trunk/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -13,16 +13,17 @@
 
 #include "webrtc/modules/include/module_common_types.h"
 #include "webrtc/typedefs.h"
 #include "webrtc/video_frame.h"
 
 namespace webrtc {
 
 // Error codes
+#define VCM_FRAME_DROPPED 4
 #define VCM_FRAME_NOT_READY 3
 #define VCM_REQUEST_SLI 2
 #define VCM_MISSING_CALLBACK 1
 #define VCM_OK 0
 #define VCM_GENERAL_ERROR -1
 #define VCM_LEVEL_EXCEEDED -2
 #define VCM_MEMORY -3
 #define VCM_PARAMETER_ERROR -4
--- a/media/webrtc/trunk/webrtc/modules/video_coding/video_sender.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/video_sender.cc
@@ -272,17 +272,17 @@ int32_t VideoSender::AddVideoFrame(const
   SetEncoderParameters(encoder_params);
   // TODO(holmer): Add support for dropping frames per stream. Currently we
   // only have one frame dropper for all streams.
   if (_nextFrameTypes[0] == kEmptyFrame) {
     return VCM_OK;
   }
   if (_mediaOpt.DropFrame()) {
     _encoder->OnDroppedFrame();
-    return VCM_OK;
+    return VCM_FRAME_DROPPED;
   }
   _mediaOpt.UpdateContentData(contentMetrics);
   // TODO(pbos): Make sure setting send codec is synchronized with video
 #ifdef VERIFY_FRAME_SIZE_VS_DATABASE
   // processing so frame size always matches.
   if (!_codecDataBase.MatchesCurrentResolution(videoFrame.width(),
                                                videoFrame.height())) {
     LOG(LS_ERROR) << "Incoming frame doesn't match set resolution. Dropping.";
--- a/media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc
+++ b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.cc
@@ -323,16 +323,21 @@ void SendStatisticsProxy::OnIncomingFram
 
 void SendStatisticsProxy::OnEncodedFrame(int encode_time_ms) {
   rtc::CritScope lock(&crit_);
   uma_container_->encode_time_counter_.Add(encode_time_ms);
   encode_time_.Apply(1.0f, encode_time_ms);
   stats_.avg_encode_time_ms = round(encode_time_.filtered());
 }
 
+void SendStatisticsProxy::OnFrameDropped() {
+  rtc::CritScope lock(&crit_);
+  ++stats_.dropped_frames;
+}
+
 void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
     uint32_t ssrc,
     const RtcpPacketTypeCounter& packet_counter) {
   rtc::CritScope lock(&crit_);
   VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
   if (stats == nullptr)
     return;
 
--- a/media/webrtc/trunk/webrtc/video/send_statistics_proxy.h
+++ b/media/webrtc/trunk/webrtc/video/send_statistics_proxy.h
@@ -24,22 +24,28 @@
 #include "webrtc/modules/video_coding/include/video_coding_defines.h"
 #include "webrtc/system_wrappers/include/clock.h"
 #include "webrtc/video/overuse_frame_detector.h"
 #include "webrtc/video/vie_encoder.h"
 #include "webrtc/video_send_stream.h"
 
 namespace webrtc {
 
+class FrameDroppedObserver {
+ public:
+  virtual void OnFrameDropped() = 0;
+};
+
 class SendStatisticsProxy : public CpuOveruseMetricsObserver,
                             public RtcpStatisticsCallback,
                             public RtcpPacketTypeCounterObserver,
                             public StreamDataCountersCallback,
                             public BitrateStatisticsObserver,
                             public FrameCountObserver,
+                            public FrameDroppedObserver,
                             public VideoEncoderRateObserver,
                             public SendSideDelayObserver {
  public:
   static const int kStatsTimeoutMs;
 
   SendStatisticsProxy(Clock* clock,
                       const VideoSendStream::Config& config,
                       VideoEncoderConfig::ContentType content_type);
@@ -47,16 +53,19 @@ class SendStatisticsProxy : public CpuOv
 
   VideoSendStream::Stats GetStats();
 
   virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
                                   const RTPVideoHeader* rtp_video_header);
   // Used to update incoming frame rate.
   void OnIncomingFrame(int width, int height);
 
+  // Used to update the number of dropped frames.
+  void OnFrameDropped() override;
+
   // Used to update encode time of frames.
   void OnEncodedFrame(int encode_time_ms);
 
   // From VideoEncoderRateObserver.
   void OnSetRates(uint32_t bitrate_bps, int framerate) override;
 
   void OnEncoderImplementationName(const char* implementation_name);
   void OnOutgoingRate(uint32_t framerate, uint32_t bitrate);
--- a/media/webrtc/trunk/webrtc/video/video_capture_input.cc
+++ b/media/webrtc/trunk/webrtc/video/video_capture_input.cc
@@ -97,16 +97,17 @@ void VideoCaptureInput::IncomingCaptured
 
   CriticalSectionScoped cs(capture_cs_.get());
   if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
     // We don't allow the same capture time for two frames, drop this one.
     LOG(LS_WARNING) << "Same/old NTP timestamp ("
                     << incoming_frame.ntp_time_ms()
                     << " <= " << last_captured_timestamp_
                     << ") for incoming frame. Dropping.";
+    stats_proxy_->OnFrameDropped();
     return;
   }
 
   captured_frame_.ShallowCopy(incoming_frame);
   last_captured_timestamp_ = incoming_frame.ntp_time_ms();
 
   overuse_detector_->FrameCaptured(captured_frame_.width(),
                                    captured_frame_.height(),
--- a/media/webrtc/trunk/webrtc/video/vie_encoder.cc
+++ b/media/webrtc/trunk/webrtc/video/vie_encoder.cc
@@ -336,38 +336,47 @@ void ViEEncoder::TraceFrameDropEnd() {
   encoder_paused_and_dropped_frame_ = false;
 }
 
 void ViEEncoder::DeliverFrame(VideoFrame video_frame) {
   RTC_DCHECK(send_payload_router_ != NULL);
   if (!send_payload_router_->active()) {
     // We've paused or we have no channels attached, don't waste resources on
     // encoding.
+    if (stats_proxy_) {
+      stats_proxy_->OnFrameDropped();
+    }
     return;
   }
   VideoCodecType codec_type;
   {
     CriticalSectionScoped cs(data_cs_.get());
     time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
     if (EncoderPaused()) {
+      if (stats_proxy_) {
+        stats_proxy_->OnFrameDropped();
+      }
       TraceFrameDropStart();
       return;
     }
     TraceFrameDropEnd();
     codec_type = encoder_config_.codecType;
   }
 
   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
                           "Encode");
   const VideoFrame* frame_to_send = &video_frame;
   // TODO(wuchengli): support texture frames.
   if (video_frame.native_handle() == NULL) {
     // Pass frame via preprocessor.
     frame_to_send = vp_->PreprocessFrame(video_frame);
     if (!frame_to_send) {
+      if (stats_proxy_) {
+        stats_proxy_->OnFrameDropped();
+      }
       // Drop this frame, or there was an error processing it.
       return;
     }
   }
 
   // If we haven't resampled the frame and we have a FrameCallback, we need to
   // make a deep copy of |video_frame|.
   VideoFrame copied_frame;
@@ -389,21 +398,29 @@ void ViEEncoder::DeliverFrame(VideoFrame
       codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
           picture_id_rpsi_;
       codec_specific_info.codecSpecific.VP8.pictureIdSLI  =
           picture_id_sli_;
       has_received_sli_ = false;
       has_received_rpsi_ = false;
     }
 
-    vcm_->AddVideoFrame(*frame_to_send, vp_->GetContentMetrics(),
-                        &codec_specific_info);
+    if (VCM_OK != vcm_->AddVideoFrame(*frame_to_send,
+        vp_->GetContentMetrics(), &codec_specific_info)) {
+      if (stats_proxy_) {
+        stats_proxy_->OnFrameDropped();
+      }
+    }
     return;
   }
-  vcm_->AddVideoFrame(*frame_to_send);
+  if (VCM_OK != vcm_->AddVideoFrame(*frame_to_send)) {
+    if (stats_proxy_) {
+      stats_proxy_->OnFrameDropped();
+    }
+  }
 }
 
 int ViEEncoder::SendKeyFrame() {
   return vcm_->IntraFrameRequest(0);
 }
 
 uint32_t ViEEncoder::LastObservedBitrateBps() const {
   CriticalSectionScoped cs(data_cs_.get());
--- a/media/webrtc/trunk/webrtc/video_send_stream.h
+++ b/media/webrtc/trunk/webrtc/video_send_stream.h
@@ -58,16 +58,17 @@ class VideoSendStream : public SendStrea
     int max_delay_ms = 0;
     StreamDataCounters rtp_stats;
     RtcpPacketTypeCounter rtcp_packet_type_counts;
     RtcpStatistics rtcp_stats;
   };
 
   struct Stats {
     std::string encoder_implementation_name = "unknown";
+    uint32_t dropped_frames = 0;
     int input_frame_rate = 0;
     int encode_frame_rate = 0;
     int avg_encode_time_ms = 0;
     int encode_usage_percent = 0;
     int target_media_bitrate_bps = 0;
     int media_bitrate_bps = 0;
     bool suspended = false;
     bool bw_limited_resolution = false;
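
Hedged follow-up, not part of the patch: once the @@NG printf in VideoConduit.cpp confirms that mDroppedFrames tracks VideoSendStream.stats.dropped_frames, the conduit could take the stat straight from the stats snapshot and retire the mSentFrames subtraction. The sketch below uses stand-in types; the real SendStreamStatistics and Stats definitions in the tree are not reproduced here.

    // follow_up_sketch.cpp - hypothetical, with stand-in types.
    #include <cstdint>

    // Stand-in for webrtc::VideoSendStream::Stats (only the field used here).
    struct Stats {
      uint32_t dropped_frames = 0;
    };

    // Stand-in for WebrtcVideoConduit::SendStreamStatistics.
    class SendStreamStatistics {
     public:
      void Update(const Stats& aStats) {
        // dropped_frames is only ever incremented (see
        // SendStatisticsProxy::OnFrameDropped), so the value reported here
        // can no longer oscillate.
        mDroppedFrames = aStats.dropped_frames;
      }
      uint32_t DroppedFrames() const { return mDroppedFrames; }

     private:
      uint32_t mDroppedFrames = 0;
    };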