Bug 1344649 - part 3: store frame sizes in queue rather than relying on ConfigurationChanged(). r?jya draft
author John Lin <jolin@mozilla.com>
Tue, 07 Mar 2017 20:03:41 +0800
changeset 495037 b884ffbece2334a377c7539faa4f7255dee494d7
parent 495036 11910ff3cf8bd699aa7f591dfc575ad2a96fb81b
child 495038 3e78f741648a5233c60ba2b54167308a335f9913
push id 48207
push user bmo:jolin@mozilla.com
push date Wed, 08 Mar 2017 06:04:43 +0000
reviewers jya
bugs 1344649
milestone 55.0a1
Bug 1344649 - part 3: store frame sizes in queue rather than relying on ConfigurationChanged(). r?jya MozReview-Commit-ID: 49ICIBs4wzF
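For context only (not part of the changeset): the change keys per-sample metadata on the presentation timestamp, so Decode() records each input's duration plus image/display size and HandleOutput() looks them up when the matching frame comes back, instead of reading a shared mConfig that ConfigurationChanged() used to rewrite under a mutex. Below is a minimal, self-contained sketch of that bookkeeping; FrameSizes and InputInfoQueue are hypothetical stand-ins for the patch's InputInfo and ArrayMap<InputInfo>, and std::map is used purely for illustration.

#include <cstdint>
#include <map>

// Hypothetical stand-in for the patch's InputInfo: everything the output
// path needs to build one decoded frame's VideoData.
struct FrameSizes {
  int64_t mDurationUs = 0;
  int mImageWidth = 0;
  int mImageHeight = 0;
  int mDisplayWidth = 0;
  int mDisplayHeight = 0;
};

// Hypothetical stand-in for ArrayMap<InputInfo>, keyed by the presentation
// time (microseconds) that MediaCodec echoes back with each output buffer.
class InputInfoQueue {
public:
  // Called on the Decode() path, before the sample is queued to the codec.
  void Insert(int64_t aPresentationTimeUs, const FrameSizes& aInfo) {
    mEntries[aPresentationTimeUs] = aInfo;
  }

  // Called on the HandleOutput() path; returns false for timestamps that
  // were never queued (e.g. a stray EOS buffer), mirroring the early
  // return in the patch.
  bool Find(int64_t aPresentationTimeUs, FrameSizes& aOut) const {
    auto it = mEntries.find(aPresentationTimeUs);
    if (it == mEntries.end()) {
      return false;
    }
    aOut = it->second;
    return true;
  }

  // Called on flush, matching mInputInfos.Clear() in the patch.
  void Clear() { mEntries.clear(); }

private:
  std::map<int64_t, FrameSizes> mEntries;
};

Because each output frame resolves its sizes from the entry its own input recorded, a mid-stream resolution change simply produces new entries for the new samples; no cross-thread configuration hand-off (and hence no mutex) is needed.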
dom/media/platforms/android/RemoteDataDecoder.cpp
dom/media/platforms/wrappers/H264Converter.cpp
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -5,19 +5,19 @@
 #include "AndroidBridge.h"
 #include "AndroidDecoderModule.h"
 #include "AndroidSurfaceTexture.h"
 #include "ArrayMap.h"
 #include "FennecJNINatives.h"
 #include "GLImages.h"
 #include "MediaData.h"
 #include "MediaInfo.h"
+#include "VideoUtils.h"
 #include "VPXDecoder.h"
-#include "VideoUtils.h"
-#include "mozilla/Mutex.h"
+
 #include "nsIGfxInfo.h"
 #include "nsPromiseFlatString.h"
 #include "nsThreadUtils.h"
 #include "prlog.h"
 #include <jni.h>
 
 #undef LOG
 #define LOG(arg, ...)                                                          \
@@ -131,16 +131,35 @@ public:
         mCodec->ReleaseOutput(mSample, aToRender);
       }
     }
 
     java::CodecProxy::GlobalRef mCodec;
     java::Sample::GlobalRef mSample;
   };
 
+
+  class InputInfo {
+  public:
+    InputInfo()
+    {
+    }
+
+    InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize, const gfx::IntSize& aDisplaySize)
+      : mDurationUs(aDurationUs)
+      , mImageSize(aImageSize)
+      , mDisplaySize(aDisplaySize)
+    {
+    }
+
+    int64_t mDurationUs = 0;
+    gfx::IntSize mImageSize;
+    gfx::IntSize mDisplaySize;
+  };
+
   class CallbacksSupport final : public JavaCallbacksSupport
   {
   public:
     CallbacksSupport(RemoteVideoDecoder* aDecoder) : mDecoder(aDecoder) { }
 
     void HandleInputExhausted() override
     {
       mDecoder->ReturnDecodedData();
@@ -167,31 +186,29 @@ public:
 
       if (!ok) {
         HandleError(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                 RESULT_DETAIL("VideoCallBack::HandleOutput")));
         return;
       }
 
       bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
-      int64_t durationUs = 0;
-      if (!mDecoder->mInputDurations.Find(presentationTimeUs, durationUs)
+      InputInfo inputInfo;
+      if (!mDecoder->mInputInfos.Find(presentationTimeUs, inputInfo)
           && !isEOS) {
         return;
       }
 
       if (size > 0) {
-        MutexAutoLock lock(mDecoder->mMutex);
-
         RefPtr<layers::Image> img = new SurfaceTextureImage(
-          mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mImage,
+          mDecoder->mSurfaceTexture.get(), inputInfo.mImageSize,
           gl::OriginPos::BottomLeft);
 
         RefPtr<VideoData> v = VideoData::CreateFromImage(
-          mDecoder->mConfig.mDisplay, offset, presentationTimeUs, durationUs,
+          inputInfo.mDisplaySize, offset, presentationTimeUs, inputInfo.mDurationUs,
           img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
           presentationTimeUs);
 
         v->SetListener(Move(releaseSample));
 
         mDecoder->Output(v);
       }
 
@@ -213,17 +230,16 @@ public:
 
   RemoteVideoDecoder(const VideoInfo& aConfig,
                      MediaFormat::Param aFormat,
                      layers::ImageContainer* aImageContainer,
                      const nsString& aDrmStubId, TaskQueue* aTaskQueue)
     : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
                         aFormat, aDrmStubId, aTaskQueue)
     , mImageContainer(aImageContainer)
-    , mMutex("RemoteVideoDecoder Mutex")
     , mConfig(aConfig)
   {
   }
 
   RefPtr<InitPromise> Init() override
   {
     mSurfaceTexture = AndroidSurfaceTexture::Create();
     if (!mSurfaceTexture) {
@@ -256,44 +272,41 @@ public:
     mIsCodecSupportAdaptivePlayback =
       mJavaDecoder->IsAdaptivePlaybackSupported();
 
     return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
   }
 
   RefPtr<MediaDataDecoder::FlushPromise> Flush() override
   {
-    mInputDurations.Clear();
+    mInputInfos.Clear();
     return RemoteDataDecoder::Flush();
   }
 
   RefPtr<MediaDataDecoder::DecodePromise> Decode(MediaRawData* aSample) override
   {
-    mInputDurations.Insert(aSample->mTime, aSample->mDuration);
+    const VideoInfo* config = aSample->mTrackInfo->GetAsVideoInfo();
+    MOZ_ASSERT(config);
+
+    InputInfo info(aSample->mDuration, config->mImage, config->mDisplay);
+    mInputInfos.Insert(aSample->mTime, info);
     return RemoteDataDecoder::Decode(aSample);
   }
 
   bool SupportDecoderRecycling() const override
   {
     return mIsCodecSupportAdaptivePlayback;
   }
-  void ConfigurationChanged(const TrackInfo& aConfig) override
-  {
-    MOZ_ASSERT(aConfig.GetAsVideoInfo());
-    MutexAutoLock lock(mMutex);
-    mConfig = *aConfig.GetAsVideoInfo();
-  }
 
 private:
   layers::ImageContainer* mImageContainer;
+  const VideoInfo mConfig;
   RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
-  ArrayMap<int64_t> mInputDurations;
+  ArrayMap<InputInfo> mInputInfos;
   bool mIsCodecSupportAdaptivePlayback = false;
-  Mutex mMutex; // Protects mConfig
-  VideoInfo mConfig;
 };
 
 class RemoteAudioDecoder : public RemoteDataDecoder
 {
 public:
   RemoteAudioDecoder(const AudioInfo& aConfig,
                      MediaFormat::Param aFormat,
                      const nsString& aDrmStubId, TaskQueue* aTaskQueue)
--- a/dom/media/platforms/wrappers/H264Converter.cpp
+++ b/dom/media/platforms/wrappers/H264Converter.cpp
@@ -318,16 +318,17 @@ H264Converter::CheckForSPSChange(MediaRa
         return NS_OK;
       }
 
   RefPtr<MediaRawData> sample = aSample;
 
   if (CanRecycleDecoder()) {
     // Do not recreate the decoder, reuse it.
     UpdateConfigFromExtraData(extra_data);
+    sample->mTrackInfo = new TrackInfoSharedPtr(mCurrentConfig, 0);
     // Ideally we would want to drain the decoder instead of flushing it.
     // However the draining operation requires calling Drain and looping several
     // times which isn't possible from within the H264Converter. So instead we
     // flush the decoder. In practice, this is a no-op as SPS change will only
     // be used with MSE. And with MSE, the MediaFormatReader would have drained
     // the decoder already.
     RefPtr<H264Converter> self = this;
     mDecoder->Flush()