Bug 1471588 - Trace all NotifyPull calls. r?padenot
author Andreas Pehrson <pehrsons@mozilla.com>
Wed, 27 Jun 2018 17:59:19 +0200
changeset 819615 209e27cc11d0921624f5436df1eb46aec60a7cf3
parent 819614 9fe0911aead12e7782168244df8df2e88fadf978
child 819616 e430563150fdd74c5df994a2f1e6ab2a0bee4eb8
push id 116598
push user bmo:apehrson@mozilla.com
push date Wed, 18 Jul 2018 08:46:17 +0000
reviewers padenot
bugs 1471588
milestone 63.0a1
Bug 1471588 - Trace all NotifyPull calls. r?padenot MozReview-Commit-ID: XlYfZ0CVZM
dom/media/CanvasCaptureMediaStream.cpp
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
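
Note on the new calls: each hunk below inserts a TRACE_AUDIO_CALLBACK_COMMENT (or upgrades a bare TRACE()) at the top of a Pull/NotifyPull implementation, so every pull shows up in profiler traces annotated with its SourceMediaStream pointer and track id. The macro is provided by the "Tracing.h" include the hunks also add. For illustration only, here is the RAII shape such a scope-tracing macro typically has; AutoScopeTracer, EmitTraceEvent and TRACE_SCOPE_COMMENT are hypothetical names, not the real Tracing.h implementation.

#include <cstdarg>
#include <cstdio>

// Hypothetical backend: the real header forwards to the Gecko profiler.
static void EmitTraceEvent(const char* aPhase, const char* aName,
                           const char* aComment)
{
  fprintf(stderr, "[trace] %s %s: %s\n", aPhase, aName, aComment);
}

// RAII helper: logs "begin" on construction and "end" on destruction, so the
// enclosing scope is recorded as a duration.
class AutoScopeTracer
{
public:
  AutoScopeTracer(const char* aName, const char* aFormat, ...) : mName(aName)
  {
    va_list args;
    va_start(args, aFormat);
    vsnprintf(mComment, sizeof(mComment), aFormat, args);
    va_end(args);
    EmitTraceEvent("begin", mName, mComment);
  }
  ~AutoScopeTracer() { EmitTraceEvent("end", mName, mComment); }

private:
  const char* mName;
  char mComment[256];
};

// Comment-carrying variant in the spirit of TRACE_AUDIO_CALLBACK_COMMENT
// (one per scope; the printf-style arguments identify the stream and track).
#define TRACE_SCOPE_COMMENT(...) \
  AutoScopeTracer scopeTracer(__func__, __VA_ARGS__)

void PullExample(void* aStream, int aTrackID)
{
  TRACE_SCOPE_COMMENT("SourceMediaStream %p track %i", aStream, aTrackID);
  // ... the actual pull work goes here; the tracer spans the whole call ...
}

Because the tracer is a stack object, early returns (such as the kReleased checks below) still close the duration correctly.
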
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -9,16 +9,17 @@
 #include "ImageContainer.h"
 #include "MediaStreamGraph.h"
 #include "MediaStreamListener.h"
 #include "gfxPlatform.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/dom/CanvasCaptureMediaStreamBinding.h"
 #include "mozilla/gfx/2D.h"
 #include "nsContentUtils.h"
+#include "Tracing.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::gfx;
 
 namespace mozilla {
 namespace dom {
 
 class OutputStreamDriver::StreamListener : public MediaStreamListener
@@ -46,16 +47,18 @@ public:
     MutexAutoLock lock(mMutex);
     mImage = aImage;
     mImageTime = aTime;
   }
 
   void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
   {
     // Called on the MediaStreamGraph thread.
+    TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                                 mSourceStream.get(), mTrackId);
     MOZ_ASSERT(mSourceStream);
     StreamTime delta = aDesiredTime - mSourceStream->GetEndOfAppendedData(mTrackId);
     if (delta > 0) {
       MutexAutoLock lock(mMutex);
 
       RefPtr<Image> image = mImage;
       IntSize size = image ? image->GetSize() : IntSize(0, 0);
       VideoSegment segment;
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -12,16 +12,17 @@
 #include "mozilla/dom/File.h"
 #include "mozilla/UniquePtr.h"
 #include "nsCOMPtr.h"
 #include "nsContentUtils.h"
 #include "nsIFile.h"
 #include "nsIFilePicker.h"
 #include "nsIPrefBranch.h"
 #include "nsIPrefService.h"
+#include "Tracing.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "nsISupportsUtils.h"
 #endif
 
 #ifdef MOZ_WEBRTC
 #include "YuvStamper.h"
 #endif
@@ -325,16 +326,18 @@ MediaEngineDefaultVideoSource::GenerateF
 
 void
 MediaEngineDefaultVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
                                     const RefPtr<SourceMediaStream>& aStream,
                                     TrackID aTrackID,
                                     StreamTime aDesiredTime,
                                     const PrincipalHandle& aPrincipalHandle)
 {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                               aStream.get(), aTrackID);
   // AppendFrame takes ownership of `segment`
   VideoSegment segment;
 
   RefPtr<layers::Image> image;
   {
     MutexAutoLock lock(mMutex);
     // Started - append real image
     // Stopped - append null
@@ -537,16 +540,18 @@ MediaEngineDefaultAudioSource::AppendToS
 
 void
 MediaEngineDefaultAudioSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
                                     const RefPtr<SourceMediaStream>& aStream,
                                     TrackID aTrackID,
                                     StreamTime aDesiredTime,
                                     const PrincipalHandle& aPrincipalHandle)
 {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                               aStream.get(), aTrackID);
   AudioSegment segment;
   // avoid accumulating rounding errors
   TrackTicks desired = aStream->TimeToTicksRoundUp(aStream->GraphRate(), aDesiredTime);
   TrackTicks delta = desired - mLastNotify;
   mLastNotify += delta;
   AppendToSegment(segment, delta, aPrincipalHandle);
   aStream->AppendToTrack(aTrackID, &segment);
 }
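
Aside on the unchanged bookkeeping in the hunk above (the same pattern appears in NotifyPullImpl in the MediaPipeline hunk further down): the desired time is converted to track ticks with a round-up conversion of the cumulative total, and only the delta against mLastNotify is appended. That is what the "avoid accumulating rounding errors" comment refers to: rounding is applied to the running target, never to each individual increment, so it cannot compound. A minimal sketch of that pattern, using made-up rates and hypothetical names (RoundUpConvert is not the Gecko TimeToTicksRoundUp):

#include <cstdint>
#include <cstdio>

using TrackTicks = int64_t;

// Hypothetical round-up rate conversion: ceil(aTime * aOutRate / aInRate).
static TrackTicks RoundUpConvert(int64_t aOutRate, int64_t aInRate, int64_t aTime)
{
  return (aTime * aOutRate + aInRate - 1) / aInRate;
}

int main()
{
  const int64_t graphRate = 48000; // made-up input rate (StreamTime units)
  const int64_t trackRate = 44100; // made-up output rate (TrackTicks units)

  TrackTicks lastNotify = 0; // plays the role of mLastNotify / mPlayedTicks

  // Simulate pulls that each ask for 128 more graph-rate frames.
  for (int64_t desiredTime = 128; desiredTime <= 1280; desiredTime += 128) {
    TrackTicks desired = RoundUpConvert(trackRate, graphRate, desiredTime);
    TrackTicks delta = desired - lastNotify; // append only what is missing
    lastNotify += delta;                     // == desired, by construction
    printf("desiredTime=%5lld -> append %lld ticks (running total %lld)\n",
           (long long)desiredTime, (long long)delta, (long long)lastNotify);
  }

  // Rounding is applied to the cumulative target, so lastNotify never drifts;
  // rounding each 128-frame increment separately would overshoot by roughly
  // one tick every third pull and the error would keep growing.
  return 0;
}
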
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -7,16 +7,17 @@
 
 #include "AllocationHandle.h"
 #include "CamerasChild.h"
 #include "MediaManager.h"
 #include "MediaTrackConstraints.h"
 #include "mozilla/ErrorNames.h"
 #include "mozilla/RefPtr.h"
 #include "nsIPrefService.h"
+#include "Tracing.h"
 #include "VideoFrameUtils.h"
 #include "VideoUtils.h"
 #include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
 mozilla::LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
@@ -481,16 +482,18 @@ MediaEngineRemoteVideoSource::GetCapabil
 
 void
 MediaEngineRemoteVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
                                    const RefPtr<SourceMediaStream>& aStream,
                                    TrackID aTrackID,
                                    StreamTime aDesiredTime,
                                    const PrincipalHandle& aPrincipalHandle)
 {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                               aStream.get(), aTrackID);
   MutexAutoLock lock(mMutex);
   if (mState == kReleased) {
     // We end the track before deallocating, so this is safe.
     return;
   }
 
   MOZ_ASSERT(mState == kStarted || mState == kStopped);
 
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -20,16 +20,17 @@
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "nsIInterfaceRequestorUtils.h"
 #include "nsITabSource.h"
 #include "VideoUtils.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "MediaTrackConstraints.h"
+#include "Tracing.h"
 
 namespace mozilla {
 
 using namespace mozilla::gfx;
 
 MediaEngineTabVideoSource::MediaEngineTabVideoSource()
   : mMutex("MediaEngineTabVideoSource::mMutex") {}
 
@@ -252,16 +253,18 @@ MediaEngineTabVideoSource::Start(const R
 
 void
 MediaEngineTabVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
                                 const RefPtr<SourceMediaStream>& aStream,
                                 TrackID aTrackID,
                                 StreamTime aDesiredTime,
                                 const PrincipalHandle& aPrincipalHandle)
 {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                               aStream.get(), aTrackID);
   VideoSegment segment;
   RefPtr<layers::Image> image;
   gfx::IntSize imageSize;
 
   {
     MutexAutoLock lock(mMutex);
     if (mState == kReleased) {
       // We end the track before setting the state to released.
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -783,16 +783,18 @@ MediaEngineWebRTCMicrophoneSource::GetSe
 
 void
 MediaEngineWebRTCMicrophoneSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
                                         const RefPtr<SourceMediaStream>& aStream,
                                         TrackID aTrackID,
                                         StreamTime aDesiredTime,
                                         const PrincipalHandle& aPrincipalHandle)
 {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
+                               aStream.get(), aTrackID);
   StreamTime delta;
 
   {
     MutexAutoLock lock(mMutex);
     size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
     if (i == mAllocations.NoIndex) {
       // This handle must have been deallocated. That's fine, and its track
       // will already be ended. No need to do anything.
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1995,17 +1995,17 @@ private:
   ~PipelineListener()
   {
     NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
                                       mConduit.forget());
   }
 
   void NotifyPullImpl(StreamTime aDesiredTime)
   {
-    TRACE();
+    TRACE_AUDIO_CALLBACK_COMMENT("Track %i", mTrackId);
     uint32_t samplesPer10ms = mRate / 100;
 
     // mSource's rate is not necessarily the same as the graph rate, since there
     // are sample-rate constraints on the inbound audio: only 16, 32, 44.1 and
     // 48kHz are supported. The audio frames we get here is going to be
     // resampled when inserted into the graph.
     TrackTicks desired = mSource->TimeToTicksRoundUp(mRate, aDesiredTime);
     TrackTicks framesNeeded = desired - mPlayedTicks;
@@ -2174,16 +2174,17 @@ public:
     , mMutex("Video PipelineListener")
   {
     AddTrackToSource();
   }
 
   // Implement MediaStreamListener
   void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
   {
+    TRACE_AUDIO_CALLBACK_COMMENT("Track %i", mTrackId);
     MutexAutoLock lock(mMutex);
 
     RefPtr<Image> image = mImage;
     StreamTime delta = aDesiredTime - mPlayedTicks;
 
     // Don't append if we've already provided a frame that supposedly
     // goes past the current aDesiredTime. Doing so means a negative
     // delta and thus messes up handling of the graph.