Bug 1336792 - part 2: use pts to index input duration. r?jya
MozReview-Commit-ID: hvf0QeRzbX
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -14,17 +14,17 @@
#include "VPXDecoder.h"
#include "nsThreadUtils.h"
#include "nsPromiseFlatString.h"
#include "nsIGfxInfo.h"
#include "prlog.h"
-#include <deque>
+#include "DurationMap.h"
#include <jni.h>
#undef LOG
#define LOG(arg, ...) MOZ_LOG(sAndroidDecoderModuleLog, \
mozilla::LogLevel::Debug, ("RemoteDataDecoder(%p)::%s: " arg, \
this, __func__, ##__VA_ARGS__))
@@ -145,21 +145,16 @@ public:
void HandleInputExhausted() override
{
mDecoder->InputExhausted();
}
void HandleOutput(Sample::Param aSample) override
{
- Maybe<int64_t> durationUs = mDecoder->mInputDurations.Get();
- if (!durationUs) {
- return;
- }
-
BufferInfo::LocalRef info = aSample->Info();
int32_t flags;
bool ok = NS_SUCCEEDED(info->Flags(&flags));
int32_t offset;
ok &= NS_SUCCEEDED(info->Offset(&offset));
@@ -170,36 +165,42 @@ public:
ok &= NS_SUCCEEDED(info->Size(&size));
if (!ok) {
HandleError(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
RESULT_DETAIL("VideoCallBack::HandleOutput")));
return;
}
+ bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+ int64_t durationUs = 0; // zero-init: EOS output may carry data whose pts was never queued, and we still fall through below
+ if (!mDecoder->mInputDurations.Find(presentationTimeUs, durationUs) && !isEOS) {
+ return;
+ }
+
if (size > 0) {
RefPtr<layers::Image> img = new SurfaceTextureImage(
mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mDisplay,
gl::OriginPos::BottomLeft);
RefPtr<VideoData> v = VideoData::CreateFromImage(
- mDecoder->mConfig, offset, presentationTimeUs, durationUs.value(),
+ mDecoder->mConfig, offset, presentationTimeUs, durationUs,
img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
presentationTimeUs,
gfx::IntRect(0, 0, mDecoder->mConfig.mDisplay.width,
mDecoder->mConfig.mDisplay.height));
UniquePtr<VideoData::Listener> listener(
new RenderOrReleaseOutput(mDecoder->mJavaDecoder, aSample));
v->SetListener(Move(listener));
mDecoder->Output(v);
}
- if ((flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (isEOS) {
mDecoder->DrainComplete();
}
}
void HandleError(const MediaResult& aError) override
{
mDecoder->Error(aError);
}
@@ -258,74 +259,32 @@ public:
}
RefPtr<MediaDataDecoder::FlushPromise> Flush() override
{
mInputDurations.Clear();
return RemoteDataDecoder::Flush();
}
- RefPtr<MediaDataDecoder::DecodePromise> Drain() override
- {
- mInputDurations.Put(0);
- return RemoteDataDecoder::Drain();
- }
-
RefPtr<MediaDataDecoder::DecodePromise> Decode(MediaRawData* aSample) override
{
- mInputDurations.Put(aSample->mDuration);
+ mInputDurations.Insert(aSample->mTime, aSample->mDuration); // key = pts, value = duration, matching Find(presentationTimeUs, durationUs)
return RemoteDataDecoder::Decode(aSample);
}
bool SupportDecoderRecycling() const override
{
return mIsCodecSupportAdaptivePlayback;
}
private:
- class DurationQueue
- {
- public:
-
- DurationQueue() : mMutex("Video duration queue") { }
-
- void Clear()
- {
- MutexAutoLock lock(mMutex);
- mValues.clear();
- }
-
- void Put(int64_t aDurationUs)
- {
- MutexAutoLock lock(mMutex);
- mValues.emplace_back(aDurationUs);
- }
-
- Maybe<int64_t> Get()
- {
- MutexAutoLock lock(mMutex);
- if (mValues.empty()) {
- return Nothing();
- }
-
- auto value = Some(mValues.front());
- mValues.pop_front();
-
- return value;
- }
-
- private:
- Mutex mMutex; // To protect mValues.
- std::deque<int64_t> mValues;
- };
-
layers::ImageContainer* mImageContainer;
const VideoInfo mConfig;
RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
- DurationQueue mInputDurations;
+ DurationMap mInputDurations;
bool mIsCodecSupportAdaptivePlayback = false;
};
class RemoteAudioDecoder : public RemoteDataDecoder
{
public:
RemoteAudioDecoder(const AudioInfo& aConfig,
MediaFormat::Param aFormat,