Bug 1347439 - part 2: use pts to index input duration. r?jya draft
author John Lin <jolin@mozilla.com>
Fri, 10 Feb 2017 13:58:10 +0800
changeset 498974 b80ffb29331b9142f41c95ee7c43617a769488aa
parent 498973 f80ca133f8e6e071adb1787861dda7476ded9517
child 498975 a003fbc97950a2c209300254874e7cf9ecf74d42
push id 49299
push user bmo:jolin@mozilla.com
push date Wed, 15 Mar 2017 08:25:49 +0000
reviewers jya
bugs 1347439, 1336792
milestone 53.0
Bug 1347439 - part 2: use pts to index input duration. r?jya

Cherry-picked from bug 1336792 part 2

MozReview-Commit-ID: HztFTJ1pn1m
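
Note: the patch below replaces the FIFO DurationQueue with a DurationMap keyed by presentation timestamp, so each decoded output looks up its input duration by pts instead of assuming in-order delivery. DurationMap.h itself is added in bug 1336792 part 1 and is not part of this diff; the sketch below only illustrates the Insert/Find/Clear semantics the new code relies on. The class name matches the include, but the internals and locking shown here are assumptions for illustration, not the actual header.

    // Illustrative sketch only -- not the real DurationMap.h from bug 1336792.
    // Mirrors the calls used in this patch: Insert(pts, duration),
    // Find(pts, duration) and Clear(), with a mutex like the removed
    // DurationQueue had.
    #include <utility>
    #include <vector>
    #include "mozilla/Mutex.h"

    class DurationMap {
    public:
      DurationMap() : mMutex("DurationMap") {}

      // Remember the duration of an input sample, keyed by its pts.
      void Insert(int64_t aKeyUs, int64_t aDurationUs)
      {
        mozilla::MutexAutoLock lock(mMutex);
        mMap.push_back(std::make_pair(aKeyUs, aDurationUs));
      }

      // Look up (and consume) the duration recorded for aKeyUs.
      // Returns false when no entry matches, e.g. for an EOS-only output.
      bool Find(int64_t aKeyUs, int64_t& aDurationUs)
      {
        mozilla::MutexAutoLock lock(mMutex);
        for (auto it = mMap.begin(); it != mMap.end(); ++it) {
          if (it->first == aKeyUs) {
            aDurationUs = it->second;
            mMap.erase(it);
            return true;
          }
        }
        return false;
      }

      void Clear()
      {
        mozilla::MutexAutoLock lock(mMutex);
        mMap.clear();
      }

    private:
      mozilla::Mutex mMutex; // Protects mMap.
      std::vector<std::pair<int64_t, int64_t>> mMap; // pts -> duration, in microseconds.
    };
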
dom/media/platforms/android/RemoteDataDecoder.cpp
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -14,20 +14,19 @@
 #include "VPXDecoder.h"
 
 #include "nsThreadUtils.h"
 #include "nsPromiseFlatString.h"
 #include "nsIGfxInfo.h"
 
 #include "prlog.h"
 
+#include "DurationMap.h"
 #include <jni.h>
 
-#include <deque>
-
 #undef LOG
 #define LOG(arg, ...) MOZ_LOG(sAndroidDecoderModuleLog, \
     mozilla::LogLevel::Debug, ("RemoteDataDecoder(%p)::%s: " arg, \
       this, __func__, ##__VA_ARGS__))
 
 using namespace mozilla;
 using namespace mozilla::gl;
 using namespace mozilla::java;
@@ -143,21 +142,16 @@ public:
       : JavaCallbacksSupport(aCallback)
       , mDecoder(aDecoder)
     {}
 
     virtual ~CallbacksSupport() {}
 
     void HandleOutput(Sample::Param aSample) override
     {
-      Maybe<int64_t> durationUs = mDecoder->mInputDurations.Get();
-      if (!durationUs) {
-        return;
-      }
-
       BufferInfo::LocalRef info = aSample->Info();
 
       int32_t flags;
       bool ok = NS_SUCCEEDED(info->Flags(&flags));
       MOZ_ASSERT(ok);
 
       int32_t offset;
       ok |= NS_SUCCEEDED(info->Offset(&offset));
@@ -168,40 +162,42 @@ public:
       MOZ_ASSERT(ok);
 
       int32_t size;
       ok |= NS_SUCCEEDED(info->Size(&size));
       MOZ_ASSERT(ok);
 
       NS_ENSURE_TRUE_VOID(ok);
 
-      if (size > 0) {
-        RefPtr<layers::Image> img =
-          new SurfaceTextureImage(mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mDisplay,
-                                  gl::OriginPos::BottomLeft);
+      bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+      int64_t durationUs = 0;
+      if (!mDecoder->mInputDurations.Find(presentationTimeUs, durationUs) && !isEOS) {
+        return;
+      }
 
-        RefPtr<VideoData> v =
-          VideoData::CreateFromImage(mDecoder->mConfig,
-                                    offset,
-                                    presentationTimeUs,
-                                    durationUs.value(),
-                                    img,
-                                    !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
-                                    presentationTimeUs,
-                                    gfx::IntRect(0, 0,
-                                                  mDecoder->mConfig.mDisplay.width,
-                                                  mDecoder->mConfig.mDisplay.height));
+      if (size > 0) {
+        RefPtr<layers::Image> img = new SurfaceTextureImage(
+          mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mDisplay,
+          gl::OriginPos::BottomLeft);
 
-        UniquePtr<VideoData::Listener> listener(new RenderOrReleaseOutput(mDecoder->mJavaDecoder, aSample));
+        RefPtr<VideoData> v = VideoData::CreateFromImage(
+          mDecoder->mConfig, offset, presentationTimeUs, durationUs,
+          img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
+          presentationTimeUs,
+          gfx::IntRect(0, 0, mDecoder->mConfig.mDisplay.width,
+                       mDecoder->mConfig.mDisplay.height));
+
+        UniquePtr<VideoData::Listener> listener(
+          new RenderOrReleaseOutput(mDecoder->mJavaDecoder, aSample));
         v->SetListener(Move(listener));
 
         mDecoderCallback->Output(v);
       }
 
-      if ((flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) != 0) {
+      if (isEOS) {
         mDecoderCallback->DrainComplete();
       }
     }
 
     friend class RemoteDataDecoder;
 
   private:
     RemoteVideoDecoder* mDecoder;
@@ -253,70 +249,29 @@ public:
   }
 
   void Flush() override
   {
     mInputDurations.Clear();
     RemoteDataDecoder::Flush();
   }
 
-  void Drain() override
-  {
-    RemoteDataDecoder::Drain();
-    mInputDurations.Put(0);
-  }
-
   void Input(MediaRawData* aSample) override
   {
+    mInputDurations.Insert(aSample->mTime, aSample->mDuration);
     RemoteDataDecoder::Input(aSample);
-    mInputDurations.Put(aSample->mDuration);
   }
 
   bool SupportDecoderRecycling() const override { return mIsCodecSupportAdaptivePlayback; }
 
 private:
-  class DurationQueue {
-  public:
-
-    DurationQueue() : mMutex("Video duration queue") {}
-
-    void Clear()
-    {
-      MutexAutoLock lock(mMutex);
-      mValues.clear();
-    }
-
-    void Put(int64_t aDurationUs)
-    {
-      MutexAutoLock lock(mMutex);
-      mValues.emplace_back(aDurationUs);
-    }
-
-    Maybe<int64_t> Get()
-    {
-      MutexAutoLock lock(mMutex);
-      if (mValues.empty()) {
-        return Nothing();
-      }
-
-      auto value = Some(mValues.front());
-      mValues.pop_front();
-
-      return value;
-    }
-
-  private:
-    Mutex mMutex; // To protect mValues.
-    std::deque<int64_t> mValues;
-  };
-
   layers::ImageContainer* mImageContainer;
   const VideoInfo& mConfig;
   RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
-  DurationQueue mInputDurations;
+  DurationMap mInputDurations;
   bool mIsCodecSupportAdaptivePlayback = false;
 };
 
 class RemoteEMEVideoDecoder : public RemoteVideoDecoder {
 public:
   RemoteEMEVideoDecoder(const VideoInfo& aConfig,
                         MediaFormat::Param aFormat,
                         MediaDataDecoderCallback* aCallback,