Bug 1201363 - Replace VideoFrameContainer with MediaStreamVideoSink in MSG. r?jesup
Replace VideoFrameContainer pointers with MediaStreamVideoSink pointers throughout the MediaStreamGraph video-output API.
MozReview-Commit-ID: 5bqEMpemwuR
--- a/dom/camera/CameraPreviewMediaStream.cpp
+++ b/dom/camera/CameraPreviewMediaStream.cpp
@@ -1,16 +1,17 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "CameraPreviewMediaStream.h"
#include "CameraCommon.h"
#include "MediaStreamListener.h"
+#include "VideoFrameContainer.h"
/**
* Maximum number of outstanding invalidates before we start to drop frames;
* if we hit this threshold, it is an indicator that the main thread is
* either very busy or the device is busy elsewhere (e.g. encoding or
* persisting video data).
*/
#define MAX_INVALIDATE_PENDING 4
@@ -54,28 +55,28 @@ CameraPreviewMediaStream::SetAudioOutput
}
void
CameraPreviewMediaStream::RemoveAudioOutput(void* aKey)
{
}
void
-CameraPreviewMediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
+CameraPreviewMediaStream::AddVideoOutput(MediaStreamVideoSink* aSink)
{
MutexAutoLock lock(mMutex);
- RefPtr<VideoFrameContainer> container = aContainer;
- AddVideoOutputImpl(container.forget());
+ RefPtr<MediaStreamVideoSink> sink = aSink;
+ AddVideoOutputImpl(sink.forget());
}
void
-CameraPreviewMediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
+CameraPreviewMediaStream::RemoveVideoOutput(MediaStreamVideoSink* aSink)
{
MutexAutoLock lock(mMutex);
- RemoveVideoOutputImpl(aContainer);
+ RemoveVideoOutputImpl(aSink);
}
void
CameraPreviewMediaStream::AddListener(MediaStreamListener* aListener)
{
MutexAutoLock lock(mMutex);
MediaStreamListener* listener = *mListeners.AppendElement() = aListener;
@@ -120,18 +121,21 @@ CameraPreviewMediaStream::Destroy()
DestroyImpl();
}
void
CameraPreviewMediaStream::Invalidate()
{
MutexAutoLock lock(mMutex);
--mInvalidatePending;
- for (nsTArray<RefPtr<VideoFrameContainer> >::size_type i = 0; i < mVideoOutputs.Length(); ++i) {
- VideoFrameContainer* output = mVideoOutputs[i];
+ for (MediaStreamVideoSink* sink : mVideoOutputs) {
+ VideoFrameContainer* output = sink->AsVideoFrameContainer();
+ if (!output) {
+ continue;
+ }
output->Invalidate();
}
}
void
CameraPreviewMediaStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
uint32_t aFlags)
{
@@ -159,32 +163,38 @@ CameraPreviewMediaStream::SetCurrentFram
}
DOM_CAMERA_LOGI("Update preview frame, %d invalidation(s) pending",
mInvalidatePending);
}
mDiscardedFrames = 0;
TimeStamp now = TimeStamp::Now();
- for (nsTArray<RefPtr<VideoFrameContainer> >::size_type i = 0; i < mVideoOutputs.Length(); ++i) {
- VideoFrameContainer* output = mVideoOutputs[i];
+ for (MediaStreamVideoSink* sink : mVideoOutputs) {
+ VideoFrameContainer* output = sink->AsVideoFrameContainer();
+ if (!output) {
+ continue;
+ }
output->SetCurrentFrame(aIntrinsicSize, aImage, now);
}
++mInvalidatePending;
}
NS_DispatchToMainThread(NewRunnableMethod(this, &CameraPreviewMediaStream::Invalidate));
}
void
CameraPreviewMediaStream::ClearCurrentFrame()
{
MutexAutoLock lock(mMutex);
- for (nsTArray<RefPtr<VideoFrameContainer> >::size_type i = 0; i < mVideoOutputs.Length(); ++i) {
- VideoFrameContainer* output = mVideoOutputs[i];
+ for (MediaStreamVideoSink* sink : mVideoOutputs) {
+ VideoFrameContainer* output = sink->AsVideoFrameContainer();
+ if (!output) {
+ continue;
+ }
output->ClearCurrentFrame();
NS_DispatchToMainThread(NewRunnableMethod(output, &VideoFrameContainer::Invalidate));
}
}
} // namespace mozilla
--- a/dom/camera/CameraPreviewMediaStream.h
+++ b/dom/camera/CameraPreviewMediaStream.h
@@ -1,21 +1,22 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_CAMERA_CAMERAPREVIEWMEDIASTREAM_H
#define DOM_CAMERA_CAMERAPREVIEWMEDIASTREAM_H
-#include "VideoFrameContainer.h"
#include "MediaStreamGraph.h"
#include "mozilla/Mutex.h"
namespace mozilla {
+class MediaStreamVideoSink;
+
class FakeMediaStreamGraph : public MediaStreamGraph
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FakeMediaStreamGraph)
public:
FakeMediaStreamGraph()
: MediaStreamGraph(16000)
{
}
@@ -40,18 +41,18 @@ class CameraPreviewMediaStream : public
typedef mozilla::layers::Image Image;
public:
CameraPreviewMediaStream();
virtual void AddAudioOutput(void* aKey) override;
virtual void SetAudioOutputVolume(void* aKey, float aVolume) override;
virtual void RemoveAudioOutput(void* aKey) override;
- virtual void AddVideoOutput(VideoFrameContainer* aContainer) override;
- virtual void RemoveVideoOutput(VideoFrameContainer* aContainer) override;
+ virtual void AddVideoOutput(MediaStreamVideoSink* aSink) override;
+ virtual void RemoveVideoOutput(MediaStreamVideoSink* aSink) override;
virtual void Suspend() override {}
virtual void Resume() override {}
virtual void AddListener(MediaStreamListener* aListener) override;
virtual void RemoveListener(MediaStreamListener* aListener) override;
virtual void Destroy() override;
void OnPreviewStateChange(bool aActive);
void Invalidate();
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -70,16 +70,17 @@
#include "MediaMetadataManager.h"
#include "MediaSourceDecoder.h"
#include "MediaStreamListener.h"
#include "DOMMediaStream.h"
#include "AudioStreamTrack.h"
#include "VideoStreamTrack.h"
#include "MediaTrackList.h"
#include "MediaStreamError.h"
+#include "VideoFrameContainer.h"
#include "AudioChannelService.h"
#include "mozilla/dom/power/PowerManagerService.h"
#include "mozilla/dom/WakeLock.h"
#include "mozilla/dom/AudioTrack.h"
#include "mozilla/dom/AudioTrackList.h"
--- a/dom/html/HTMLVideoElement.cpp
+++ b/dom/html/HTMLVideoElement.cpp
@@ -12,16 +12,17 @@
#include "nsSize.h"
#include "nsError.h"
#include "nsNodeInfoManager.h"
#include "plbase64.h"
#include "nsXPCOMStrings.h"
#include "prlock.h"
#include "nsThreadUtils.h"
#include "ImageContainer.h"
+#include "VideoFrameContainer.h"
#include "nsIScriptSecurityManager.h"
#include "nsIXPConnect.h"
#include "nsITimer.h"
#include "MediaError.h"
#include "MediaDecoder.h"
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -9,16 +9,17 @@
#include "mozilla/MathAlgorithms.h"
#include <limits>
#include "nsIObserver.h"
#include "nsTArray.h"
#include "VideoUtils.h"
#include "MediaDecoderStateMachine.h"
#include "ImageContainer.h"
#include "MediaResource.h"
+#include "VideoFrameContainer.h"
#include "nsError.h"
#include "mozilla/Preferences.h"
#include "mozilla/StaticPtr.h"
#include "nsIMemoryReporter.h"
#include "nsComponentManagerUtils.h"
#include <algorithm>
#include "MediaShutdownManager.h"
#include "AudioChannelService.h"
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -12,16 +12,17 @@
#include "nsSize.h"
#include "Layers.h"
#include "MediaData.h"
#include "MediaInfo.h"
#include "MediaFormatReader.h"
#include "MediaResource.h"
#include "mozilla/SharedThreadPool.h"
#include "VideoUtils.h"
+#include "VideoFrameContainer.h"
#include <algorithm>
#ifdef MOZ_EME
#include "mozilla/CDMProxy.h"
#endif
using namespace mozilla::media;
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -22,16 +22,17 @@
#include "AudioChannelService.h"
#include "AudioNodeStream.h"
#include "AudioNodeExternalInputStream.h"
#include "MediaStreamListener.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "mozilla/media/MediaUtils.h"
#include <algorithm>
#include "GeckoProfiler.h"
+#include "VideoFrameContainer.h"
#include "mozilla/unused.h"
#include "mozilla/media/MediaUtils.h"
#ifdef MOZ_WEBRTC
#include "AudioOutputObserver.h"
#endif
#include "mtransport/runnable_utils.h"
#include "webaudio/blink/HRTFDatabaseLoader.h"
@@ -1015,17 +1016,25 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
// tracks.
// frameBufferTime is in the non-blocking interval.
GraphTime frameTime = aStream->StreamTimeToGraphTime(frameBufferTime);
TimeStamp targetTime = currentTimeStamp +
TimeDuration::FromSeconds(MediaTimeToSeconds(frameTime - IterationEnd()));
if (frame->GetForceBlack()) {
if (!blackImage) {
- blackImage = aStream->mVideoOutputs[0]->GetImageContainer()->CreatePlanarYCbCrImage();
+      // FIXME: PlayVideo will be replaced in a later changeset of this bug:
+      // "Call MediaStreamVideoSink::setCurrentFrames in SourceMediaStream::AppendToTrack."
+      // Until then, this is a temporary workaround to keep the build and
+      // tests passing.
+ if (!aStream->mVideoOutputs[0]->AsVideoFrameContainer()) {
+ return;
+ }
+ blackImage = aStream->mVideoOutputs[0]->AsVideoFrameContainer()->
+ GetImageContainer()->CreatePlanarYCbCrImage();
if (blackImage) {
// Sets the image to a single black pixel, which will be scaled to
// fill the rendered size.
SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage());
}
}
if (blackImage) {
image = blackImage;
@@ -1039,18 +1048,21 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
}
if (!aStream->mLastPlayedVideoFrame.GetImage())
return;
AutoTArray<ImageContainer::NonOwningImage,4> images;
bool haveMultipleImages = false;
- for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
- VideoFrameContainer* output = aStream->mVideoOutputs[i];
+ for (MediaStreamVideoSink* sink : aStream->mVideoOutputs) {
+ VideoFrameContainer* output = sink->AsVideoFrameContainer();
+ if (!output) {
+ continue;
+ }
bool principalHandleChanged =
lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
lastPrincipalHandle != output->GetLastPrincipalHandle();
// Find previous frames that may still be valid.
AutoTArray<ImageContainer::OwningImage,4> previousImages;
output->GetImageContainer()->GetCurrentImages(&previousImages);
@@ -2254,65 +2266,65 @@ MediaStream::RemoveAudioOutput(void* aKe
mStream->RemoveAudioOutputImpl(mKey);
}
void* mKey;
};
GraphImpl()->AppendMessage(MakeUnique<Message>(this, aKey));
}
void
-MediaStream::AddVideoOutputImpl(already_AddRefed<VideoFrameContainer> aContainer)
+MediaStream::AddVideoOutputImpl(already_AddRefed<MediaStreamVideoSink> aSink)
{
- RefPtr<VideoFrameContainer> container = aContainer;
- STREAM_LOG(LogLevel::Info, ("MediaStream %p Adding VideoFrameContainer %p as output",
- this, container.get()));
- *mVideoOutputs.AppendElement() = container.forget();
+ RefPtr<MediaStreamVideoSink> sink = aSink;
+ STREAM_LOG(LogLevel::Info, ("MediaStream %p Adding MediaStreamVideoSink %p as output",
+ this, sink.get()));
+ *mVideoOutputs.AppendElement() = sink.forget();
}
void
-MediaStream::RemoveVideoOutputImpl(VideoFrameContainer* aContainer)
+MediaStream::RemoveVideoOutputImpl(MediaStreamVideoSink* aSink)
{
- STREAM_LOG(LogLevel::Info, ("MediaStream %p Removing VideoFrameContainer %p as output",
- this, aContainer));
+ STREAM_LOG(LogLevel::Info, ("MediaStream %p Removing MediaStreamVideoSink %p as output",
+ this, aSink));
// Ensure that any frames currently queued for playback by the compositor
// are removed.
- aContainer->ClearFutureFrames();
- mVideoOutputs.RemoveElement(aContainer);
+ aSink->ClearFrames();
+ mVideoOutputs.RemoveElement(aSink);
}
void
-MediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
+MediaStream::AddVideoOutput(MediaStreamVideoSink* aSink)
{
class Message : public ControlMessage {
public:
- Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
- ControlMessage(aStream), mContainer(aContainer) {}
+ Message(MediaStream* aStream, MediaStreamVideoSink* aSink) :
+ ControlMessage(aStream), mSink(aSink) {}
void Run() override
{
- mStream->AddVideoOutputImpl(mContainer.forget());
+ mStream->AddVideoOutputImpl(mSink.forget());
}
- RefPtr<VideoFrameContainer> mContainer;
+ RefPtr<MediaStreamVideoSink> mSink;
};
- GraphImpl()->AppendMessage(MakeUnique<Message>(this, aContainer));
+ GraphImpl()->AppendMessage(MakeUnique<Message>(this, aSink));
}
void
-MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
+MediaStream::RemoveVideoOutput(MediaStreamVideoSink* aSink)
{
class Message : public ControlMessage {
public:
- Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
- ControlMessage(aStream), mContainer(aContainer) {}
+ Message(MediaStream* aStream, MediaStreamVideoSink* aSink) :
+ ControlMessage(aStream), mSink(aSink) {}
void Run() override
{
- mStream->RemoveVideoOutputImpl(mContainer);
+ mStream->RemoveVideoOutputImpl(mSink);
}
- RefPtr<VideoFrameContainer> mContainer;
+ RefPtr<MediaStreamVideoSink> mSink;
};
- GraphImpl()->AppendMessage(MakeUnique<Message>(this, aContainer));
+ GraphImpl()->AppendMessage(MakeUnique<Message>(this, aSink));
}
void
MediaStream::Suspend()
{
class Message : public ControlMessage {
public:
explicit Message(MediaStream* aStream) :
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -10,18 +10,19 @@
#include "mozilla/Mutex.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "AudioStream.h"
#include "nsTArray.h"
#include "nsIRunnable.h"
-#include "VideoFrameContainer.h"
#include "VideoSegment.h"
+#include "MediaStreamVideoSink.h"
#include "MainThreadUtils.h"
#include "StreamTracks.h"
#include "nsAutoPtr.h"
#include "nsAutoRef.h"
#include <speex/speex_resampler.h>
class nsIRunnable;
@@ -204,17 +205,17 @@ struct TrackBound
* Transitions into and out of the "blocked" and "finished" states are managed
* by the MediaStreamGraph on the media graph thread.
*
* We buffer media data ahead of the consumers' reading offsets. It is possible
* to have buffered data but still be blocked.
*
* Any stream can have its audio and video playing when requested. The media
* stream graph plays audio by constructing audio output streams as necessary.
- * Video is played by setting video frames into an VideoFrameContainer at the right
+ * Video is played by setting video frames into a MediaStreamVideoSink at the right
* time. To ensure video plays in sync with audio, make sure that the same
* stream is playing both the audio and video.
*
* The data in a stream is managed by StreamTracks. It consists of a set of
* tracks of various types that can start and end over time.
*
* Streams are explicitly managed. The client creates them via
* MediaStreamGraph::CreateInput/ProcessedMediaStream, and releases them by calling
@@ -286,20 +287,20 @@ public:
// separate. Since the stream is always playing the same contents, only
// a single audio output stream is used; the volumes are combined.
// Currently only the first enabled audio track is played.
// XXX change this so all enabled audio tracks are mixed and played.
virtual void AddAudioOutput(void* aKey);
virtual void SetAudioOutputVolume(void* aKey, float aVolume);
virtual void RemoveAudioOutput(void* aKey);
// Since a stream can be played multiple ways, we need to be able to
- // play to multiple VideoFrameContainers.
+ // play to multiple MediaStreamVideoSinks.
// Only the first enabled video track is played.
- virtual void AddVideoOutput(VideoFrameContainer* aContainer);
- virtual void RemoveVideoOutput(VideoFrameContainer* aContainer);
+ virtual void AddVideoOutput(MediaStreamVideoSink* aSink);
+ virtual void RemoveVideoOutput(MediaStreamVideoSink* aSink);
// Explicitly suspend. Useful for example if a media element is pausing
// and we need to stop its stream emitting its buffered data. As soon as the
// Suspend message reaches the graph, the stream stops processing. It
// ignores its inputs and produces silence/no video until Resumed. Its
// current time does not advance.
virtual void Suspend();
virtual void Resume();
// Events will be dispatched by calling methods of aListener.
@@ -418,18 +419,18 @@ public:
void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
void AddAudioOutputImpl(void* aKey);
// Returns true if this stream has an audio output.
bool HasAudioOutput()
{
return !mAudioOutputs.IsEmpty();
}
void RemoveAudioOutputImpl(void* aKey);
- void AddVideoOutputImpl(already_AddRefed<VideoFrameContainer> aContainer);
- void RemoveVideoOutputImpl(VideoFrameContainer* aContainer);
+ void AddVideoOutputImpl(already_AddRefed<MediaStreamVideoSink> aSink);
+ void RemoveVideoOutputImpl(MediaStreamVideoSink* aSink);
void AddListenerImpl(already_AddRefed<MediaStreamListener> aListener);
void RemoveListenerImpl(MediaStreamListener* aListener);
void RemoveAllListenersImpl();
virtual void AddTrackListenerImpl(already_AddRefed<MediaStreamTrackListener> aListener,
TrackID aTrackID);
virtual void RemoveTrackListenerImpl(MediaStreamTrackListener* aListener,
TrackID aTrackID);
virtual void AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStreamTrackListener> aListener,
@@ -578,17 +579,17 @@ protected:
// Client-set volume of this stream
struct AudioOutput {
explicit AudioOutput(void* aKey) : mKey(aKey), mVolume(1.0f) {}
void* mKey;
float mVolume;
};
nsTArray<AudioOutput> mAudioOutputs;
- nsTArray<RefPtr<VideoFrameContainer>> mVideoOutputs;
+ nsTArray<RefPtr<MediaStreamVideoSink>> mVideoOutputs;
// We record the last played video frame to avoid playing the frame again
// with a different frame id.
VideoFrame mLastPlayedVideoFrame;
nsTArray<RefPtr<MediaStreamListener> > mListeners;
nsTArray<TrackBound<MediaStreamTrackListener>> mTrackListeners;
nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
nsTArray<TrackID> mDisabledTrackIDs;
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -9,16 +9,17 @@
#include "MediaResource.h"
#include "VideoUtils.h"
#include "AndroidMediaDecoder.h"
#include "AndroidMediaPluginHost.h"
#include "MediaDecoderStateMachine.h"
#include "ImageContainer.h"
#include "AbstractMediaDecoder.h"
#include "gfx2DGlue.h"
+#include "VideoFrameContainer.h"
namespace mozilla {
using namespace mozilla::gfx;
using namespace mozilla::media;
typedef mozilla::layers::Image Image;
typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
--- a/dom/media/gtest/TestMediaFormatReader.cpp
+++ b/dom/media/gtest/TestMediaFormatReader.cpp
@@ -9,16 +9,17 @@
#include "mozilla/TaskQueue.h"
#include "ImageContainer.h"
#include "Layers.h"
#include "MediaData.h"
#include "MediaFormatReader.h"
#include "MP4Decoder.h"
#include "MockMediaDecoderOwner.h"
#include "MockMediaResource.h"
+#include "VideoFrameContainer.h"
using namespace mozilla;
using namespace mozilla::dom;
class MockMP4Decoder : public MP4Decoder
{
public:
MockMP4Decoder()
--- a/dom/media/ogg/OggReader.cpp
+++ b/dom/media/ogg/OggReader.cpp
@@ -20,16 +20,17 @@ extern "C" {
#include "mozilla/TimeStamp.h"
#include "VorbisUtils.h"
#include "MediaMetadataManager.h"
#include "nsAutoPtr.h"
#include "nsISeekableStream.h"
#include "gfx2DGlue.h"
#include "mozilla/Telemetry.h"
#include "nsPrintfCString.h"
+#include "VideoFrameContainer.h"
using namespace mozilla::gfx;
using namespace mozilla::media;
namespace mozilla {
// On B2G estimate the buffered ranges rather than calculating them explicitly.
// This prevents us doing I/O on the main thread, which is prohibited in B2G.
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -12,16 +12,17 @@
#include "VideoUtils.h"
#include "MediaOmxDecoder.h"
#include "AbstractMediaDecoder.h"
#include "AudioChannelService.h"
#include "OmxDecoder.h"
#include "MPAPI.h"
#include "gfx2DGlue.h"
#include "MediaStreamSource.h"
+#include "VideoFrameContainer.h"
#define MAX_DROPPED_FRAMES 25
// Try not to spend more than this much time in a single call to DecodeVideoFrame.
#define MAX_VIDEO_DECODE_SECONDS 0.1
using namespace mozilla::gfx;
using namespace mozilla::media;
using namespace android;