Bug 1344649 - part 3: store frame sizes in queue rather than relying on ConfigurationChanged(). r?jya
MozReview-Commit-ID: 49ICIBs4wzF
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -5,19 +5,19 @@
#include "AndroidBridge.h"
#include "AndroidDecoderModule.h"
#include "AndroidSurfaceTexture.h"
#include "ArrayMap.h"
#include "FennecJNINatives.h"
#include "GLImages.h"
#include "MediaData.h"
#include "MediaInfo.h"
+#include "VideoUtils.h"
#include "VPXDecoder.h"
-#include "VideoUtils.h"
-#include "mozilla/Mutex.h"
+
#include "nsIGfxInfo.h"
#include "nsPromiseFlatString.h"
#include "nsThreadUtils.h"
#include "prlog.h"
#include <jni.h>
#undef LOG
#define LOG(arg, ...) \
@@ -131,16 +131,35 @@ public:
mCodec->ReleaseOutput(mSample, aToRender);
}
}
java::CodecProxy::GlobalRef mCodec;
java::Sample::GlobalRef mSample;
};
+
+ class InputInfo {
+ public:
+ InputInfo()
+ {
+ }
+
+ InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize, const gfx::IntSize& aDisplaySize)
+ : mDurationUs(aDurationUs)
+ , mImageSize(aImageSize)
+ , mDisplaySize(aDisplaySize)
+ {
+ }
+
+ int64_t mDurationUs = 0;
+ gfx::IntSize mImageSize;
+ gfx::IntSize mDisplaySize;
+ };
+
class CallbacksSupport final : public JavaCallbacksSupport
{
public:
CallbacksSupport(RemoteVideoDecoder* aDecoder) : mDecoder(aDecoder) { }
void HandleInputExhausted() override
{
mDecoder->ReturnDecodedData();
@@ -167,31 +186,29 @@ public:
if (!ok) {
HandleError(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
RESULT_DETAIL("VideoCallBack::HandleOutput")));
return;
}
bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
- int64_t durationUs = 0;
- if (!mDecoder->mInputDurations.Find(presentationTimeUs, durationUs)
+ InputInfo inputInfo;
+ if (!mDecoder->mInputInfos.Find(presentationTimeUs, inputInfo)
&& !isEOS) {
return;
}
if (size > 0) {
- MutexAutoLock lock(mDecoder->mMutex);
-
RefPtr<layers::Image> img = new SurfaceTextureImage(
- mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mImage,
+ mDecoder->mSurfaceTexture.get(), inputInfo.mImageSize,
gl::OriginPos::BottomLeft);
RefPtr<VideoData> v = VideoData::CreateFromImage(
- mDecoder->mConfig.mDisplay, offset, presentationTimeUs, durationUs,
+ inputInfo.mDisplaySize, offset, presentationTimeUs, inputInfo.mDurationUs,
img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
presentationTimeUs);
v->SetListener(Move(releaseSample));
mDecoder->Output(v);
}
@@ -213,17 +230,16 @@ public:
RemoteVideoDecoder(const VideoInfo& aConfig,
MediaFormat::Param aFormat,
layers::ImageContainer* aImageContainer,
const nsString& aDrmStubId, TaskQueue* aTaskQueue)
: RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
aFormat, aDrmStubId, aTaskQueue)
, mImageContainer(aImageContainer)
- , mMutex("RemoteVideoDecoder Mutex")
, mConfig(aConfig)
{
}
RefPtr<InitPromise> Init() override
{
mSurfaceTexture = AndroidSurfaceTexture::Create();
if (!mSurfaceTexture) {
@@ -256,44 +272,41 @@ public:
mIsCodecSupportAdaptivePlayback =
mJavaDecoder->IsAdaptivePlaybackSupported();
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}
RefPtr<MediaDataDecoder::FlushPromise> Flush() override
{
- mInputDurations.Clear();
+ mInputInfos.Clear();
return RemoteDataDecoder::Flush();
}
RefPtr<MediaDataDecoder::DecodePromise> Decode(MediaRawData* aSample) override
{
- mInputDurations.Insert(aSample->mTime, aSample->mDuration);
+ const VideoInfo* config = aSample->mTrackInfo
+ ? aSample->mTrackInfo->GetAsVideoInfo() : &mConfig;
+ MOZ_ASSERT(config);
+ InputInfo info(aSample->mDuration, config->mImage, config->mDisplay);
+ mInputInfos.Insert(aSample->mTime, info);
return RemoteDataDecoder::Decode(aSample);
}
bool SupportDecoderRecycling() const override
{
return mIsCodecSupportAdaptivePlayback;
}
- void ConfigurationChanged(const TrackInfo& aConfig) override
- {
- MOZ_ASSERT(aConfig.GetAsVideoInfo());
- MutexAutoLock lock(mMutex);
- mConfig = *aConfig.GetAsVideoInfo();
- }
private:
layers::ImageContainer* mImageContainer;
+ const VideoInfo mConfig;
RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
- ArrayMap<int64_t> mInputDurations;
+ ArrayMap<InputInfo> mInputInfos;
bool mIsCodecSupportAdaptivePlayback = false;
- Mutex mMutex; // Protects mConfig
- VideoInfo mConfig;
};
class RemoteAudioDecoder : public RemoteDataDecoder
{
public:
RemoteAudioDecoder(const AudioInfo& aConfig,
MediaFormat::Param aFormat,
const nsString& aDrmStubId, TaskQueue* aTaskQueue)
--- a/dom/media/platforms/wrappers/H264Converter.cpp
+++ b/dom/media/platforms/wrappers/H264Converter.cpp
@@ -318,16 +318,17 @@ H264Converter::CheckForSPSChange(MediaRa
return NS_OK;
}
RefPtr<MediaRawData> sample = aSample;
if (CanRecycleDecoder()) {
// Do not recreate the decoder, reuse it.
UpdateConfigFromExtraData(extra_data);
+ sample->mTrackInfo = new TrackInfoSharedPtr(mCurrentConfig, 0);
// Ideally we would want to drain the decoder instead of flushing it.
// However the draining operation requires calling Drain and looping several
// times which isn't possible from within the H264Converter. So instead we
// flush the decoder. In practice, this is a no-op as SPS change will only
// be used with MSE. And with MSE, the MediaFormatReader would have drained
// the decoder already.
RefPtr<H264Converter> self = this;
mDecoder->Flush()