--- a/dom/media/AudioCaptureStream.cpp
+++ b/dom/media/AudioCaptureStream.cpp
@@ -25,18 +25,21 @@ using namespace mozilla::dom;
using namespace mozilla::gfx;
namespace mozilla
{
// We are mixing to mono until PeerConnection can accept stereo
static const uint32_t MONO = 1;
-AudioCaptureStream::AudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread)
- : ProcessedMediaStream(aMainThread), mTrackId(aTrackId), mStarted(false), mTrackCreated(false)
+AudioCaptureStream::AudioCaptureStream(TrackID aTrackId)
+ : ProcessedMediaStream()
+ , mTrackId(aTrackId)
+ , mStarted(false)
+ , mTrackCreated(false)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_COUNT_CTOR(AudioCaptureStream);
mMixer.AddCallback(this);
}
AudioCaptureStream::~AudioCaptureStream()
{
--- a/dom/media/AudioCaptureStream.h
+++ b/dom/media/AudioCaptureStream.h
@@ -19,17 +19,17 @@ class DOMMediaStream;
/**
* See MediaStreamGraph::CreateAudioCaptureStream.
*/
class AudioCaptureStream : public ProcessedMediaStream,
public MixerCallbackReceiver
{
public:
- AudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread);
+ AudioCaptureStream(TrackID aTrackId);
virtual ~AudioCaptureStream();
void Start();
void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
protected:
void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -585,18 +585,16 @@ public:
// being destroyed, so we don't hold on to a dead pointer. Main thread only.
void RegisterTrackListener(TrackListener* aListener);
// Unregisters a track listener from this MediaStream. The caller must call
// UnregisterTrackListener before being destroyed, so we don't hold on to
// a dead pointer. Main thread only.
void UnregisterTrackListener(TrackListener* aListener);
- AbstractThread* AbstractMainThread() const { return mAbstractMainThread; }
-
protected:
virtual ~DOMMediaStream();
void Destroy();
void InitSourceStream(MediaStreamGraph* aGraph);
void InitTrackUnionStream(MediaStreamGraph* aGraph);
void InitAudioCaptureStream(nsIPrincipal* aPrincipal, MediaStreamGraph* aGraph);
@@ -751,17 +749,16 @@ private:
// Principal identifying who may access the collected contents of this stream.
// If null, this stream can be used by anyone because it has no content yet.
nsCOMPtr<nsIPrincipal> mPrincipal;
// Video principal is used by video element as access is requested to its
// image data.
nsCOMPtr<nsIPrincipal> mVideoPrincipal;
nsTArray<dom::PrincipalChangeObserver<DOMMediaStream>*> mPrincipalChangeObservers;
CORSMode mCORSMode;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
NS_DEFINE_STATIC_IID_ACCESSOR(DOMMediaStream,
NS_DOMMEDIASTREAM_IID)
#define NS_DOMLOCALMEDIASTREAM_IID \
{ 0xb1437260, 0xec61, 0x4dfa, \
{ 0x92, 0x54, 0x04, 0x44, 0xe2, 0xb5, 0x94, 0x9c } }
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1079,18 +1079,17 @@ public:
if (mAudioDevice &&
mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
// It should be possible to pipe the capture stream to anything. CORS is
// not a problem here, we got explicit user content.
nsCOMPtr<nsIPrincipal> principal = window->GetExtantDoc()->NodePrincipal();
domStream =
DOMMediaStream::CreateAudioCaptureStreamAsInput(window, principal, msg);
- stream = msg->CreateSourceStream(
- globalWindow->AbstractMainThreadFor(TaskCategory::Other)); // Placeholder
+ stream = msg->CreateSourceStream(); // Placeholder
msg->RegisterCaptureStreamForWindow(
mWindowID, domStream->GetInputStream()->AsProcessedStream());
window->SetAudioCapture(true);
} else {
class LocalTrackSource : public MediaStreamTrackSource
{
public:
LocalTrackSource(nsIPrincipal* aPrincipal,
--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -428,17 +428,16 @@ class MediaRecorder::Session: public nsI
public:
Session(MediaRecorder* aRecorder, int32_t aTimeSlice)
: mRecorder(aRecorder)
, mTimeSlice(aTimeSlice)
, mStopIssued(false)
, mIsStartEventFired(false)
, mNeedSessionEndTask(true)
, mSelectedVideoTrackID(TRACK_NONE)
- , mAbstractMainThread(aRecorder->mAbstractMainThread)
{
MOZ_ASSERT(NS_IsMainThread());
uint32_t maxMem = Preferences::GetUint("media.recorder.max_memory",
MAX_ALLOW_MEMORY_BUFFER);
mEncodedBufferCache = new EncodedBufferCache(maxMem);
mLastBlobTimeStamp = TimeStamp::Now();
}
@@ -484,17 +483,17 @@ public:
void Start()
{
LOG(LogLevel::Debug, ("Session.Start %p", this));
MOZ_ASSERT(NS_IsMainThread());
// Create a Track Union Stream
MediaStreamGraph* gm = mRecorder->GetSourceMediaStream()->Graph();
TrackRate trackRate = gm->GraphRate();
- mTrackUnionStream = gm->CreateTrackUnionStream(mAbstractMainThread);
+ mTrackUnionStream = gm->CreateTrackUnionStream();
MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");
mTrackUnionStream->SetAutofinish(true);
DOMMediaStream* domStream = mRecorder->Stream();
if (domStream) {
// Get the available tracks from the DOMMediaStream.
// The callback will report back tracks that we have to connect to
@@ -947,17 +946,16 @@ private:
bool mStopIssued;
// Indicate the session had fire start event. Encoding thread only.
bool mIsStartEventFired;
// False if the InitEncoder called successfully, ensure the
// ExtractRunnable/DestroyRunnable will end the session.
// Main thread only.
bool mNeedSessionEndTask;
TrackID mSelectedVideoTrackID;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
NS_IMPL_ISUPPORTS(MediaRecorder::Session, nsIObserver)
MediaRecorder::~MediaRecorder()
{
if (mPipeStream != nullptr) {
mInputPort->Destroy();
@@ -966,31 +964,29 @@ MediaRecorder::~MediaRecorder()
LOG(LogLevel::Debug, ("~MediaRecorder (%p)", this));
UnRegisterActivityObserver();
}
MediaRecorder::MediaRecorder(DOMMediaStream& aSourceMediaStream,
nsPIDOMWindowInner* aOwnerWindow)
: DOMEventTargetHelper(aOwnerWindow)
, mState(RecordingState::Inactive)
- , mAbstractMainThread(aSourceMediaStream.AbstractMainThread())
{
MOZ_ASSERT(aOwnerWindow);
MOZ_ASSERT(aOwnerWindow->IsInnerWindow());
mDOMStream = &aSourceMediaStream;
RegisterActivityObserver();
}
MediaRecorder::MediaRecorder(AudioNode& aSrcAudioNode,
uint32_t aSrcOutput,
nsPIDOMWindowInner* aOwnerWindow)
: DOMEventTargetHelper(aOwnerWindow)
, mState(RecordingState::Inactive)
- , mAbstractMainThread(aSrcAudioNode.AbstractMainThread())
{
MOZ_ASSERT(aOwnerWindow);
MOZ_ASSERT(aOwnerWindow->IsInnerWindow());
// Only AudioNodeStream of kind EXTERNAL_STREAM stores output audio data in
// the track (see AudioNodeStream::AdvanceOutputSegment()). That means track
// union stream in recorder session won't be able to copy data from the
// stream of non-destination node. Create a pipe stream in this case.
--- a/dom/media/MediaRecorder.h
+++ b/dom/media/MediaRecorder.h
@@ -154,17 +154,16 @@ protected:
// It specifies the container format as well as the audio and video capture formats.
nsString mMimeType;
uint32_t mAudioBitsPerSecond;
uint32_t mVideoBitsPerSecond;
uint32_t mBitsPerSecond;
- const RefPtr<AbstractThread> mAbstractMainThread;
private:
// Register MediaRecorder into Document to listen the activity changes.
void RegisterActivityObserver();
void UnRegisterActivityObserver();
bool CheckPermission(const nsString &aType);
};
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -1007,19 +1007,21 @@ MediaStreamGraphImpl::OpenAudioInputImpl
}
nsresult
MediaStreamGraphImpl::OpenAudioInput(int aID,
AudioDataListener *aListener)
{
// So, so, so annoying. Can't AppendMessage except on Mainthread
if (!NS_IsMainThread()) {
- RefPtr<nsIRunnable> runnable = WrapRunnable(this,
- &MediaStreamGraphImpl::OpenAudioInput,
- aID, RefPtr<AudioDataListener>(aListener));
+ RefPtr<nsIRunnable> runnable =
+ WrapRunnable(this,
+ &MediaStreamGraphImpl::OpenAudioInput,
+ aID,
+ RefPtr<AudioDataListener>(aListener));
mAbstractMainThread->Dispatch(runnable.forget());
return NS_OK;
}
class Message : public ControlMessage {
public:
Message(MediaStreamGraphImpl *aGraph, int aID,
AudioDataListener *aListener) :
ControlMessage(nullptr), mGraph(aGraph), mID(aID), mListener(aListener) {}
@@ -1078,19 +1080,20 @@ MediaStreamGraphImpl::CloseAudioInputImp
}
}
void
MediaStreamGraphImpl::CloseAudioInput(AudioDataListener *aListener)
{
// So, so, so annoying. Can't AppendMessage except on Mainthread
if (!NS_IsMainThread()) {
- RefPtr<nsIRunnable> runnable = WrapRunnable(this,
- &MediaStreamGraphImpl::CloseAudioInput,
- RefPtr<AudioDataListener>(aListener));
+ RefPtr<nsIRunnable> runnable =
+ WrapRunnable(this,
+ &MediaStreamGraphImpl::CloseAudioInput,
+ RefPtr<AudioDataListener>(aListener));
mAbstractMainThread->Dispatch(runnable.forget());
return;
}
class Message : public ControlMessage {
public:
Message(MediaStreamGraphImpl *aGraph, AudioDataListener *aListener) :
ControlMessage(nullptr), mGraph(aGraph), mListener(aListener) {}
virtual void Run()
@@ -1878,38 +1881,38 @@ MediaStreamGraphImpl::AppendMessage(Uniq
}
return;
}
mCurrentTaskMessageQueue.AppendElement(Move(aMessage));
EnsureRunInStableState();
}
-void MediaStreamGraphImpl::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable)
+void
+MediaStreamGraphImpl::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable)
{
mAbstractMainThread->Dispatch(std::move(aRunnable));
}
-MediaStream::MediaStream(AbstractThread* aMainThread)
+MediaStream::MediaStream()
: mTracksStartTime(0)
, mStartBlocking(GRAPH_TIME_MAX)
, mSuspendedCount(0)
, mFinished(false)
, mNotifiedFinished(false)
, mNotifiedBlocked(false)
, mHasCurrentData(false)
, mNotifiedHasCurrentData(false)
, mMainThreadCurrentTime(0)
, mMainThreadFinished(false)
, mFinishedNotificationSent(false)
, mMainThreadDestroyed(false)
, mNrOfMainThreadUsers(0)
, mGraph(nullptr)
, mAudioChannelType(dom::AudioChannel::Normal)
- , mAbstractMainThread(aMainThread)
{
MOZ_COUNT_CTOR(MediaStream);
}
MediaStream::~MediaStream()
{
MOZ_COUNT_DTOR(MediaStream);
NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
@@ -1979,16 +1982,22 @@ MediaStream::GraphImpl()
}
MediaStreamGraph*
MediaStream::Graph()
{
return mGraph;
}
+AbstractThread*
+MediaStream::AbstractMainThread()
+{
+ return mGraph->AbstractMainThread();
+}
+
void
MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
{
MOZ_ASSERT(!mGraph, "Should only be called once");
mGraph = aGraph;
mAudioChannelType = aGraph->AudioChannel();
mTracks.InitGraphRate(aGraph->GraphRate());
}
@@ -2547,43 +2556,37 @@ MediaStream::RunAfterPendingUpdates(alre
// runnable will run in finite time.
if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) {
runnable->Run();
return;
}
class Message : public ControlMessage {
public:
- Message(MediaStream* aStream,
- already_AddRefed<nsIRunnable> aRunnable,
- AbstractThread* aMainThread)
+ Message(MediaStream* aStream, already_AddRefed<nsIRunnable> aRunnable)
: ControlMessage(aStream)
, mRunnable(aRunnable)
- , mAbstractMainThread(aMainThread)
- {}
+ {}
void Run() override
{
- mStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- mRunnable.forget());
+ mStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ mRunnable.forget());
}
void RunDuringShutdown() override
{
// Don't run mRunnable now as it may call AppendMessage() which would
// assume that there are no remaining controlMessagesToRunDuringShutdown.
MOZ_ASSERT(NS_IsMainThread());
mStream->GraphImpl()->Dispatch(mRunnable.forget());
}
private:
nsCOMPtr<nsIRunnable> mRunnable;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
- graph->AppendMessage(
- MakeUnique<Message>(this, runnable.forget(), mAbstractMainThread));
+ graph->AppendMessage(MakeUnique<Message>(this, runnable.forget()));
}
void
MediaStream::SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode)
{
if (aMode == DisabledTrackMode::ENABLED) {
for (int32_t i = mDisabledTracks.Length() - 1; i >= 0; --i) {
if (aTrackID == mDisabledTracks[i].mTrackID) {
@@ -2685,26 +2688,26 @@ MediaStream::AddMainThreadListener(MainT
private:
~NotifyRunnable() {}
RefPtr<MediaStream> mStream;
};
nsCOMPtr<nsIRunnable> runnable = new NotifyRunnable(this);
- mAbstractMainThread->Dispatch(runnable.forget());
+ GraphImpl()->Dispatch(runnable.forget());
}
-SourceMediaStream::SourceMediaStream(AbstractThread* aMainThread) :
- MediaStream(aMainThread),
- mMutex("mozilla::media::SourceMediaStream"),
- mUpdateKnownTracksTime(0),
- mPullEnabled(false),
- mUpdateFinished(false),
- mNeedsMixing(false)
+SourceMediaStream::SourceMediaStream()
+ : MediaStream()
+ , mMutex("mozilla::media::SourceMediaStream")
+ , mUpdateKnownTracksTime(0)
+ , mPullEnabled(false)
+ , mUpdateFinished(false)
+ , mNeedsMixing(false)
{
}
nsresult
SourceMediaStream::OpenAudioInput(int aID,
AudioDataListener *aListener)
{
if (GraphImpl()) {
@@ -3284,53 +3287,53 @@ MediaInputPort::BlockSourceTrackIdImpl(T
already_AddRefed<Pledge<bool>>
MediaInputPort::BlockSourceTrackId(TrackID aTrackId, BlockingMode aBlockingMode)
{
class Message : public ControlMessage {
public:
Message(MediaInputPort* aPort,
TrackID aTrackId,
BlockingMode aBlockingMode,
- already_AddRefed<nsIRunnable> aRunnable,
- AbstractThread* aMainThread)
- : ControlMessage(aPort->GetDestination()),
- mPort(aPort), mTrackId(aTrackId), mBlockingMode(aBlockingMode),
- mRunnable(aRunnable), mAbstractMainThread(aMainThread) {}
+ already_AddRefed<nsIRunnable> aRunnable)
+ : ControlMessage(aPort->GetDestination())
+ , mPort(aPort)
+ , mTrackId(aTrackId)
+ , mBlockingMode(aBlockingMode)
+ , mRunnable(aRunnable)
+ {
+ }
void Run() override
{
mPort->BlockSourceTrackIdImpl(mTrackId, mBlockingMode);
if (mRunnable) {
- mStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- mRunnable.forget());
+ mStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ mRunnable.forget());
}
}
void RunDuringShutdown() override
{
Run();
}
RefPtr<MediaInputPort> mPort;
TrackID mTrackId;
BlockingMode mBlockingMode;
nsCOMPtr<nsIRunnable> mRunnable;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
MOZ_ASSERT(IsTrackIDExplicit(aTrackId),
"Only explicit TrackID is allowed");
auto pledge = MakeRefPtr<Pledge<bool>>();
nsCOMPtr<nsIRunnable> runnable = NewRunnableFrom([pledge]() {
MOZ_ASSERT(NS_IsMainThread());
pledge->Resolve(true);
return NS_OK;
});
- GraphImpl()->AppendMessage(MakeUnique<Message>(this, aTrackId, aBlockingMode,
- runnable.forget(),
- mAbstractMainThread));
+ GraphImpl()->AppendMessage(
+ MakeUnique<Message>(this, aTrackId, aBlockingMode, runnable.forget()));
return pledge.forget();
}
already_AddRefed<MediaInputPort>
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, TrackID aTrackID,
TrackID aDestTrackID,
uint16_t aInputNumber, uint16_t aOutputNumber,
nsTArray<TrackID>* aBlockedTracks)
@@ -3358,19 +3361,18 @@ ProcessedMediaStream::AllocateInputPort(
MOZ_ASSERT(aStream->GraphImpl() == GraphImpl());
MOZ_ASSERT(aTrackID == TRACK_ANY || IsTrackIDExplicit(aTrackID),
"Only TRACK_ANY and explicit ID are allowed for source track");
MOZ_ASSERT(aDestTrackID == TRACK_ANY || IsTrackIDExplicit(aDestTrackID),
"Only TRACK_ANY and explicit ID are allowed for destination track");
MOZ_ASSERT(aTrackID != TRACK_ANY || aDestTrackID == TRACK_ANY,
"Generic MediaInputPort cannot produce a single destination track");
- RefPtr<MediaInputPort> port =
- new MediaInputPort(aStream, aTrackID, this, aDestTrackID,
- aInputNumber, aOutputNumber, mAbstractMainThread);
+ RefPtr<MediaInputPort> port = new MediaInputPort(
+ aStream, aTrackID, this, aDestTrackID, aInputNumber, aOutputNumber);
if (aBlockedTracks) {
for (TrackID trackID : *aBlockedTracks) {
port->BlockSourceTrackIdImpl(trackID, BlockingMode::CREATION);
}
}
port->SetGraphImpl(GraphImpl());
GraphImpl()->AppendMessage(MakeUnique<Message>(port));
return port.forget();
@@ -3450,18 +3452,18 @@ MediaStreamGraphImpl::MediaStreamGraphIm
, mLatencyLog(AsyncLatencyLogger::Get())
#ifdef MOZ_WEBRTC
, mFarendObserverRef(nullptr)
#endif
, mSelfRef(this)
#ifdef DEBUG
, mCanRunMessagesSynchronously(false)
#endif
+ , mAbstractMainThread(aMainThread)
, mAudioChannel(aChannel)
- , mAbstractMainThread(aMainThread)
{
if (mRealtime) {
if (aDriverRequested == AUDIO_THREAD_DRIVER) {
AudioCallbackDriver* driver = new AudioCallbackDriver(this);
mDriver = driver;
} else {
mDriver = new SystemClockDriver(this);
}
@@ -3469,16 +3471,23 @@ MediaStreamGraphImpl::MediaStreamGraphIm
mDriver = new OfflineClockDriver(this, MEDIA_GRAPH_TARGET_PERIOD_MS);
}
mLastMainThreadUpdate = TimeStamp::Now();
RegisterWeakAsyncMemoryReporter(this);
}
+AbstractThread*
+MediaStreamGraph::AbstractMainThread()
+{
+ MOZ_ASSERT(static_cast<MediaStreamGraphImpl*>(this)->mAbstractMainThread);
+ return static_cast<MediaStreamGraphImpl*>(this)->mAbstractMainThread;
+}
+
void
MediaStreamGraphImpl::Destroy()
{
// First unregister from memory reporting.
UnregisterWeakMemoryReporter(this);
// Clear the self reference which will destroy this instance.
mSelfRef = nullptr;
@@ -3574,21 +3583,21 @@ MediaStreamGraph::GetInstance(MediaStrea
MediaStreamGraph*
MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate,
nsPIDOMWindowInner* aWindow)
{
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(aWindow);
- MediaStreamGraphImpl* graph =
- new MediaStreamGraphImpl(OFFLINE_THREAD_DRIVER,
- aSampleRate,
- AudioChannel::Normal,
- parentObject->AbstractMainThreadFor(TaskCategory::Other));
+ MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(
+ OFFLINE_THREAD_DRIVER,
+ aSampleRate,
+ AudioChannel::Normal,
+ parentObject->AbstractMainThreadFor(TaskCategory::Other));
LOG(LogLevel::Debug, ("Starting up Offline MediaStreamGraph %p", graph));
return graph;
}
void
MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
@@ -3749,35 +3758,35 @@ FinishCollectReports(nsIHandleReportCall
}
#undef REPORT
manager->EndReport();
}
SourceMediaStream*
-MediaStreamGraph::CreateSourceStream(AbstractThread* aMainThread)
+MediaStreamGraph::CreateSourceStream()
{
- SourceMediaStream* stream = new SourceMediaStream(aMainThread);
+ SourceMediaStream* stream = new SourceMediaStream();
AddStream(stream);
return stream;
}
ProcessedMediaStream*
-MediaStreamGraph::CreateTrackUnionStream(AbstractThread* aMainThread)
+MediaStreamGraph::CreateTrackUnionStream()
{
- TrackUnionStream* stream = new TrackUnionStream(aMainThread);
+ TrackUnionStream* stream = new TrackUnionStream();
AddStream(stream);
return stream;
}
ProcessedMediaStream*
-MediaStreamGraph::CreateAudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread)
+MediaStreamGraph::CreateAudioCaptureStream(TrackID aTrackId)
{
- AudioCaptureStream* stream = new AudioCaptureStream(aTrackId, aMainThread);
+ AudioCaptureStream* stream = new AudioCaptureStream(aTrackId);
AddStream(stream);
return stream;
}
void
MediaStreamGraph::AddStream(MediaStream* aStream)
{
NS_ADDREF(aStream);
@@ -4170,19 +4179,19 @@ MediaStreamGraphImpl::ConnectToCaptureSt
ProcessedMediaStream* sink = mWindowCaptureStreams[i].mCaptureStreamSink;
return sink->AllocateInputPort(aMediaStream);
}
}
return nullptr;
}
void
-MediaStreamGraph::
-DispatchToMainThreadAfterStreamStateUpdate(AbstractThread* aMainThread,
- already_AddRefed<nsIRunnable> aRunnable)
+MediaStreamGraph::DispatchToMainThreadAfterStreamStateUpdate(
+ already_AddRefed<nsIRunnable> aRunnable)
{
- MOZ_ASSERT(aMainThread);
AssertOnGraphThreadOrNotRunning();
*mPendingUpdateRunnables.AppendElement() =
- aMainThread->CreateDirectTaskDrainer(Move(aRunnable));
+ static_cast<MediaStreamGraphImpl*>(this)
+ ->AbstractMainThread()
+ ->CreateDirectTaskDrainer(Move(aRunnable));
}
} // namespace mozilla
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -72,17 +72,16 @@ namespace media {
*
* Media decoding, audio processing and media playback use thread-safe APIs to
* the media graph to ensure they can continue while the main thread is blocked.
*
* When the graph is changed, we may need to throw out buffered data and
* reprocess it. This is triggered automatically by the MediaStreamGraph.
*/
-class AbstractThread;
class AudioNodeEngine;
class AudioNodeExternalInputStream;
class AudioNodeStream;
class MediaInputPort;
class MediaStream;
class MediaStreamGraph;
class MediaStreamGraphImpl;
class ProcessedMediaStream;
@@ -250,29 +249,33 @@ struct TrackBound
#undef GetCurrentTime
#endif
class MediaStream : public mozilla::LinkedListElement<MediaStream>
{
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
- explicit MediaStream(AbstractThread* aMainThread);
+ explicit MediaStream();
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~MediaStream();
public:
/**
* Returns the graph that owns this stream.
*/
MediaStreamGraphImpl* GraphImpl();
MediaStreamGraph* Graph();
/**
+ * Returns the main thread associated with this MediaStream's graph.
+ */
+ AbstractThread* AbstractMainThread();
+ /**
* Sets the graph that owns this stream. Should only be called once.
*/
void SetGraphImpl(MediaStreamGraphImpl* aGraph);
void SetGraphImpl(MediaStreamGraph* aGraph);
/**
* Returns sample rate of the graph.
*/
@@ -660,30 +663,28 @@ protected:
bool mFinishedNotificationSent;
bool mMainThreadDestroyed;
int mNrOfMainThreadUsers;
// Our media stream graph. null if destroyed on the graph thread.
MediaStreamGraphImpl* mGraph;
dom::AudioChannel mAudioChannelType;
-
- const RefPtr<AbstractThread> mAbstractMainThread;
};
/**
* This is a stream into which a decoder can write audio and video.
*
* Audio and video can be written on any thread, but you probably want to
* always write from the same thread to avoid unexpected interleavings.
*/
class SourceMediaStream : public MediaStream
{
public:
- explicit SourceMediaStream(AbstractThread* aMainThread);
+ explicit SourceMediaStream();
SourceMediaStream* AsSourceStream() override { return this; }
// Media graph thread only
// Users of audio inputs go through the stream so it can track when the
// last stream referencing an input goes away, so it can close the cubeb
// input. Also note: callable on any thread (though it bounces through
@@ -952,28 +953,29 @@ enum class BlockingMode
* clears its reference (the last main-thread reference to the object). When
* the Destroy message is processed on the graph manager thread we disconnect
* the port and drop the graph's reference, destroying the object.
*/
class MediaInputPort final
{
private:
// Do not call this constructor directly. Instead call aDest->AllocateInputPort.
- MediaInputPort(MediaStream* aSource, TrackID& aSourceTrack,
- ProcessedMediaStream* aDest, TrackID& aDestTrack,
- uint16_t aInputNumber, uint16_t aOutputNumber,
- AbstractThread* aMainThread)
+ MediaInputPort(MediaStream* aSource,
+ TrackID& aSourceTrack,
+ ProcessedMediaStream* aDest,
+ TrackID& aDestTrack,
+ uint16_t aInputNumber,
+ uint16_t aOutputNumber)
: mSource(aSource)
, mSourceTrack(aSourceTrack)
, mDest(aDest)
, mDestTrack(aDestTrack)
, mInputNumber(aInputNumber)
, mOutputNumber(aOutputNumber)
, mGraph(nullptr)
- , mAbstractMainThread(aMainThread)
{
MOZ_COUNT_CTOR(MediaInputPort);
}
// Private destructor, to discourage deletion outside of Release():
~MediaInputPort()
{
MOZ_COUNT_DTOR(MediaInputPort);
@@ -1106,30 +1108,30 @@ private:
const uint16_t mInputNumber;
const uint16_t mOutputNumber;
typedef Pair<TrackID, BlockingMode> BlockedTrack;
nsTArray<BlockedTrack> mBlockedTracks;
// Our media stream graph
MediaStreamGraphImpl* mGraph;
-
- const RefPtr<AbstractThread> mAbstractMainThread;
};
/**
* This stream processes zero or more input streams in parallel to produce
* its output. The details of how the output is produced are handled by
* subclasses overriding the ProcessInput method.
*/
class ProcessedMediaStream : public MediaStream
{
public:
- explicit ProcessedMediaStream(AbstractThread* aMainThread)
- : MediaStream(aMainThread), mAutofinish(false), mCycleMarker(0)
+ explicit ProcessedMediaStream()
+ : MediaStream()
+ , mAutofinish(false)
+ , mCycleMarker(0)
{}
// Control API.
/**
* Allocates a new input port attached to source aStream.
* This stream can be removed by calling MediaInputPort::Remove().
*
* The input port is tied to aTrackID in the source stream.
@@ -1243,17 +1245,17 @@ protected:
bool mAutofinish;
// After UpdateStreamOrder(), mCycleMarker is either 0 or 1 to indicate
// whether this stream is in a muted cycle. During ordering it can contain
// other marker values - see MediaStreamGraphImpl::UpdateStreamOrder().
uint32_t mCycleMarker;
};
/**
- * There can be multiple MediaStreamGraph per process: one per AudioChannel.
+ * There is a single MediaStreamGraph per window.
* Additionaly, each OfflineAudioContext object creates its own MediaStreamGraph
* object too..
*/
class MediaStreamGraph
{
public:
// We ensure that the graph current time advances in multiples of
@@ -1272,53 +1274,58 @@ public:
OFFLINE_THREAD_DRIVER
};
static const uint32_t AUDIO_CALLBACK_DRIVER_SHUTDOWN_TIMEOUT = 20*1000;
// Main thread only
static MediaStreamGraph* GetInstance(GraphDriverType aGraphDriverRequested,
dom::AudioChannel aChannel,
nsPIDOMWindowInner* aWindow);
- static MediaStreamGraph* CreateNonRealtimeInstance(TrackRate aSampleRate,
- nsPIDOMWindowInner* aWindowId);
+ static MediaStreamGraph* CreateNonRealtimeInstance(
+ TrackRate aSampleRate,
+ nsPIDOMWindowInner* aWindowId);
+
+ // Returns the main thread for this graph. The returned pointer is
+ // always valid (non-null). Thread-safe.
+ AbstractThread* AbstractMainThread();
+
// Idempotent
static void DestroyNonRealtimeInstance(MediaStreamGraph* aGraph);
virtual nsresult OpenAudioInput(int aID,
AudioDataListener *aListener) {
return NS_ERROR_FAILURE;
}
virtual void CloseAudioInput(AudioDataListener *aListener) {}
// Control API.
/**
* Create a stream that a media decoder (or some other source of
* media data, such as a camera) can write to.
*/
- SourceMediaStream* CreateSourceStream(AbstractThread* aMainThread);
+ SourceMediaStream* CreateSourceStream();
/**
* Create a stream that will form the union of the tracks of its input
* streams.
* A TrackUnionStream contains all the tracks of all its input streams.
* Adding a new input stream makes that stream's tracks immediately appear as new
* tracks starting at the time the input stream was added.
* Removing an input stream makes the output tracks corresponding to the
* removed tracks immediately end.
* For each added track, the track ID of the output track is the track ID
* of the input track or one plus the maximum ID of all previously added
* tracks, whichever is greater.
* TODO at some point we will probably need to add API to select
* particular tracks of each input stream.
*/
- ProcessedMediaStream* CreateTrackUnionStream(AbstractThread* aMainThread);
+ ProcessedMediaStream* CreateTrackUnionStream();
/**
* Create a stream that will mix all its audio input.
*/
- ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId,
- AbstractThread* aMainThread);
+ ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId);
/**
* Add a new stream to the graph. Main thread.
*/
void AddStream(MediaStream* aStream);
/* From the main thread, ask the MSG to send back an event when the graph
* thread is running, and audio is being processed. */
@@ -1346,29 +1353,19 @@ public:
/**
* Media graph thread only.
* Dispatches a runnable that will run on the main thread after all
* main-thread stream state has been next updated.
*
* Should only be called during MediaStreamListener callbacks or during
* ProcessedMediaStream::ProcessInput().
- *
- * |aMainThread| is the corresponding AbstractThread on the main thread to
- * drain the direct tasks generated by |aRunnable|.
- * Note: The reasons for assigning proper |aMainThread| are
- * - MSG serves media elements in multiple windows run on main thread.
- * - DocGroup-specific AbstractMainThread is introduced to cluster the tasks
- * of the same window for prioritizing tasks among different windows.
- * - Proper |aMainThread| ensures that tasks dispatched to the main thread are
- * clustered to the right queue and are executed in right order.
*/
- virtual void
- DispatchToMainThreadAfterStreamStateUpdate(AbstractThread* aMainThread,
- already_AddRefed<nsIRunnable> aRunnable);
+ virtual void DispatchToMainThreadAfterStreamStateUpdate(
+ already_AddRefed<nsIRunnable> aRunnable);
/**
* Returns graph sample rate in Hz.
*/
TrackRate GraphRate() const { return mSampleRate; }
void RegisterCaptureStreamForWindow(uint64_t aWindowId,
ProcessedMediaStream* aCaptureStream);
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -815,16 +815,17 @@ public:
#ifdef MOZ_WEBRTC
RefPtr<AudioOutputObserver> mFarendObserverRef;
#endif
dom::AudioChannel AudioChannel() const { return mAudioChannel; }
// used to limit graph shutdown time
nsCOMPtr<nsITimer> mShutdownTimer;
+ const RefPtr<AbstractThread> mAbstractMainThread;
private:
virtual ~MediaStreamGraphImpl();
MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
/**
* This class uses manual memory management, and all pointers to it are raw
@@ -848,14 +849,13 @@ private:
#ifdef DEBUG
/**
* Used to assert when AppendMessage() runs ControlMessages synchronously.
*/
bool mCanRunMessagesSynchronously;
#endif
dom::AudioChannel mAudioChannel;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
} // namespace mozilla
#endif /* MEDIASTREAMGRAPHIMPL_H_ */
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -83,17 +83,16 @@ NS_IMPL_CYCLE_COLLECTION_0(MediaStreamTr
* will be a combination of its old principal and all the new ones until the
* latest main thread principal matches the PrincipalHandle on the MSG thread.
*/
class MediaStreamTrack::PrincipalHandleListener : public MediaStreamTrackListener
{
public:
explicit PrincipalHandleListener(MediaStreamTrack* aTrack)
: mTrack(aTrack)
- , mAbstractMainThread(aTrack->mOwningStream->AbstractMainThread())
{}
void Forget()
{
MOZ_ASSERT(NS_IsMainThread());
mTrack = nullptr;
}
@@ -107,29 +106,27 @@ public:
mTrack->NotifyPrincipalHandleChanged(aNewPrincipalHandle);
}
void NotifyPrincipalHandleChanged(MediaStreamGraph* aGraph,
const PrincipalHandle& aNewPrincipalHandle) override
{
aGraph->DispatchToMainThreadAfterStreamStateUpdate(
- mAbstractMainThread,
NewRunnableMethod<StoreCopyPassByConstLRef<PrincipalHandle>>(
"dom::MediaStreamTrack::PrincipalHandleListener::"
"DoNotifyPrincipalHandleChanged",
this,
&PrincipalHandleListener::DoNotifyPrincipalHandleChanged,
aNewPrincipalHandle));
}
protected:
// These fields may only be accessed on the main thread
MediaStreamTrack* mTrack;
- const RefPtr<AbstractThread> mAbstractMainThread;
};
MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
TrackID aInputTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints)
: mOwningStream(aStream), mTrackID(aTrackID),
mInputTrackID(aInputTrackID), mSource(aSource),
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -41,18 +41,19 @@ namespace mozilla {
#ifdef STREAM_LOG
#undef STREAM_LOG
#endif
LazyLogModule gTrackUnionStreamLog("TrackUnionStream");
#define STREAM_LOG(type, msg) MOZ_LOG(gTrackUnionStreamLog, type, msg)
-TrackUnionStream::TrackUnionStream(AbstractThread* aMainThread) :
- ProcessedMediaStream(aMainThread), mNextAvailableTrackID(1)
+TrackUnionStream::TrackUnionStream()
+ : ProcessedMediaStream()
+ , mNextAvailableTrackID(1)
{
}
void TrackUnionStream::RemoveInput(MediaInputPort* aPort)
{
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing input %p", this, aPort));
for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
if (mTrackMap[i].mInputPort == aPort) {
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -12,17 +12,17 @@
namespace mozilla {
/**
* See MediaStreamGraph::CreateTrackUnionStream.
*/
class TrackUnionStream : public ProcessedMediaStream {
public:
- explicit TrackUnionStream(AbstractThread* aMainThread);
+ explicit TrackUnionStream();
virtual TrackUnionStream* AsTrackUnionStream() override { return this; }
friend class DOMMediaStream;
void RemoveInput(MediaInputPort* aPort) override;
void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
void SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode) override;
--- a/dom/media/webaudio/AudioDestinationNode.cpp
+++ b/dom/media/webaudio/AudioDestinationNode.cpp
@@ -163,18 +163,17 @@ public:
mSampleRate, mBuffer.forget(), rv);
if (rv.Failed()) {
rv.SuppressException();
return;
}
aNode->ResolvePromise(renderedBuffer);
- mAbstractMainThread->Dispatch(do_AddRef(new OnCompleteTask(context,
- renderedBuffer)));
+ context->Dispatch(do_AddRef(new OnCompleteTask(context, renderedBuffer)));
context->OnStateChanged(nullptr, AudioContextState::Closed);
}
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
{
size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
if (mBuffer) {
@@ -256,19 +255,18 @@ public:
}
bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
if (newInputMuted != mLastInputMuted) {
mLastInputMuted = newInputMuted;
RefPtr<InputMutedRunnable> runnable =
new InputMutedRunnable(aStream, newInputMuted);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- runnable.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ runnable.forget());
}
}
bool IsActive() const override
{
// Keep processing to track stream time, which is used for all timelines
// associated with the same AudioContext. If there are no other engines
// for the AudioContext, then this could return false to suspend the
@@ -331,19 +329,21 @@ AudioDestinationNode::AudioDestinationNo
, mFramesToProduce(aLength)
, mAudioChannel(AudioChannel::Normal)
, mIsOffline(aIsOffline)
, mAudioChannelSuspended(false)
, mCaptured(false)
, mAudible(AudioChannelService::AudibleState::eAudible)
{
nsPIDOMWindowInner* window = aContext->GetParentObject();
- MediaStreamGraph* graph = aIsOffline ?
- MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window) :
- MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
+ MediaStreamGraph* graph =
+ aIsOffline
+ ? MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window)
+ : MediaStreamGraph::GetInstance(
+ MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
AudioNodeEngine* engine = aIsOffline ?
new OfflineDestinationNodeEngine(this, aNumberOfChannels,
aLength, aSampleRate) :
static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
AudioNodeStream::Flags flags =
AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
--- a/dom/media/webaudio/AudioNodeExternalInputStream.cpp
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.cpp
@@ -10,38 +10,38 @@
#include "AudioChannelFormat.h"
#include "mozilla/dom/MediaStreamAudioSourceNode.h"
using namespace mozilla::dom;
namespace mozilla {
AudioNodeExternalInputStream::AudioNodeExternalInputStream(
- AudioNodeEngine* aEngine, TrackRate aSampleRate, AbstractThread* aMainThread)
- : AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate, aMainThread)
+ AudioNodeEngine* aEngine,
+ TrackRate aSampleRate)
+ : AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate)
{
MOZ_COUNT_CTOR(AudioNodeExternalInputStream);
}
AudioNodeExternalInputStream::~AudioNodeExternalInputStream()
{
MOZ_COUNT_DTOR(AudioNodeExternalInputStream);
}
/* static */ already_AddRefed<AudioNodeExternalInputStream>
AudioNodeExternalInputStream::Create(MediaStreamGraph* aGraph,
- AudioNodeEngine* aEngine,
- AbstractThread* aMainThread)
+ AudioNodeEngine* aEngine)
{
AudioContext* ctx = aEngine->NodeMainThread()->Context();
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aGraph->GraphRate() == ctx->SampleRate());
RefPtr<AudioNodeExternalInputStream> stream =
- new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate(), aMainThread);
+ new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate());
stream->mSuspendedCount += ctx->ShouldSuspendNewStream();
aGraph->AddStream(stream);
return stream.forget();
}
/**
* Copies the data in aInput to aOffsetInBlock within aBlock.
* aBlock must have been allocated with AllocateInputBlock and have a channel
--- a/dom/media/webaudio/AudioNodeExternalInputStream.h
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.h
@@ -18,22 +18,22 @@ class AbstractThread;
* This is a MediaStream implementation that acts for a Web Audio node but
* unlike other AudioNodeStreams, supports any kind of MediaStream as an
* input --- handling any number of audio tracks and handling blocking of
* the input MediaStream.
*/
class AudioNodeExternalInputStream final : public AudioNodeStream
{
public:
- static already_AddRefed<AudioNodeExternalInputStream>
- Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine, AbstractThread* aMainThread);
+ static already_AddRefed<AudioNodeExternalInputStream> Create(
+ MediaStreamGraph* aGraph,
+ AudioNodeEngine* aEngine);
protected:
- AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate,
- AbstractThread* aMainThread);
+ AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate);
~AudioNodeExternalInputStream();
public:
void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
private:
/**
* Determines if this is enabled or not. Disabled nodes produce silence.
--- a/dom/media/webaudio/AudioNodeStream.cpp
+++ b/dom/media/webaudio/AudioNodeStream.cpp
@@ -24,27 +24,26 @@ namespace mozilla {
* for regular audio contexts, and the rate requested by the web content
* for offline audio contexts.
* Each chunk in the track is a single block of WEBAUDIO_BLOCK_SIZE samples.
* Note: This must be a different value than MEDIA_STREAM_DEST_TRACK_ID
*/
AudioNodeStream::AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
- TrackRate aSampleRate,
- AbstractThread* aMainThread)
- : ProcessedMediaStream(aMainThread),
- mEngine(aEngine),
- mSampleRate(aSampleRate),
- mFlags(aFlags),
- mNumberOfInputChannels(2),
- mIsActive(aEngine->IsActive()),
- mMarkAsFinishedAfterThisBlock(false),
- mAudioParamStream(false),
- mPassThrough(false)
+ TrackRate aSampleRate)
+ : ProcessedMediaStream()
+ , mEngine(aEngine)
+ , mSampleRate(aSampleRate)
+ , mFlags(aFlags)
+ , mNumberOfInputChannels(2)
+ , mIsActive(aEngine->IsActive())
+ , mMarkAsFinishedAfterThisBlock(false)
+ , mAudioParamStream(false)
+ , mPassThrough(false)
{
MOZ_ASSERT(NS_IsMainThread());
mSuspendedCount = !(mIsActive || mFlags & EXTERNAL_OUTPUT);
mChannelCountMode = ChannelCountMode::Max;
mChannelInterpretation = ChannelInterpretation::Speakers;
// AudioNodes are always producing data
mHasCurrentData = true;
mLastChunks.SetLength(std::max(uint16_t(1), mEngine->OutputCount()));
@@ -73,18 +72,17 @@ AudioNodeStream::Create(AudioContext* aC
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_RELEASE_ASSERT(aGraph);
// MediaRecorders use an AudioNodeStream, but no AudioNode
AudioNode* node = aEngine->NodeMainThread();
RefPtr<AudioNodeStream> stream =
- new AudioNodeStream(aEngine, aFlags, aGraph->GraphRate(),
- aCtx->GetOwnerGlobal()->AbstractMainThreadFor(TaskCategory::Other));
+ new AudioNodeStream(aEngine, aFlags, aGraph->GraphRate());
stream->mSuspendedCount += aCtx->ShouldSuspendNewStream();
if (node) {
stream->SetChannelMixingParametersImpl(node->ChannelCount(),
node->ChannelCountModeValue(),
node->ChannelInterpretationValue());
}
aGraph->AddStream(stream);
return stream.forget();
--- a/dom/media/webaudio/AudioNodeStream.h
+++ b/dom/media/webaudio/AudioNodeStream.h
@@ -71,18 +71,17 @@ public:
MediaStreamGraph* aGraph);
protected:
/**
* Transfers ownership of aEngine to the new AudioNodeStream.
*/
AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
- TrackRate aSampleRate,
- AbstractThread* aMainThread);
+ TrackRate aSampleRate);
~AudioNodeStream();
public:
// Control API
/**
* Sets a parameter that's a time relative to some stream's played time.
* This time is converted to a time relative to this stream when it's set.
--- a/dom/media/webaudio/BiquadFilterNode.cpp
+++ b/dom/media/webaudio/BiquadFilterNode.cpp
@@ -154,34 +154,32 @@ public:
}
if (!hasTail) {
if (!mBiquads.IsEmpty()) {
mBiquads.Clear();
aStream->ScheduleCheckForInactive();
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
return;
}
PodArrayZero(inputBuffer);
} else if(mBiquads.Length() != aInput.ChannelCount()){
if (mBiquads.IsEmpty()) {
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
} else { // Help people diagnose bug 924718
WebAudioUtils::LogToDeveloperConsole(mWindowID,
"BiquadFilterChannelCountChangeWarning");
}
// Adjust the number of biquads based on the number of channels
mBiquads.SetLength(aInput.ChannelCount());
}
--- a/dom/media/webaudio/ConvolverNode.cpp
+++ b/dom/media/webaudio/ConvolverNode.cpp
@@ -120,19 +120,18 @@ public:
input.AllocateChannels(1);
WriteZeroesToAudioBlock(&input, 0, WEBAUDIO_BLOCK_SIZE);
} else {
if (mLeftOverData != INT32_MIN) {
mLeftOverData = INT32_MIN;
aStream->ScheduleCheckForInactive();
RefPtr<PlayingRefChanged> refchanged =
new PlayingRefChanged(aStream, PlayingRefChanged::RELEASE);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
return;
}
} else {
if (aInput.mVolume != 1.0f) {
// Pre-multiply the input's volume
uint32_t numChannels = aInput.ChannelCount();
@@ -142,19 +141,18 @@ public:
float* dest = input.ChannelFloatsForWrite(i);
AudioBlockCopyChannelWithScale(src, aInput.mVolume, dest);
}
}
if (mLeftOverData <= 0) {
RefPtr<PlayingRefChanged> refchanged =
new PlayingRefChanged(aStream, PlayingRefChanged::ADDREF);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
mLeftOverData = mBufferLength;
MOZ_ASSERT(mLeftOverData > 0);
}
aOutput->AllocateChannels(2);
mReverb->process(&input, aOutput);
}
--- a/dom/media/webaudio/DelayNode.cpp
+++ b/dom/media/webaudio/DelayNode.cpp
@@ -77,36 +77,34 @@ public:
bool* aFinished) override
{
MOZ_ASSERT(aStream->SampleRate() == mDestination->SampleRate());
if (!aInput.IsSilentOrSubnormal()) {
if (mLeftOverData <= 0) {
RefPtr<PlayingRefChanged> refchanged =
new PlayingRefChanged(aStream, PlayingRefChanged::ADDREF);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
mLeftOverData = mBuffer.MaxDelayTicks();
} else if (mLeftOverData > 0) {
mLeftOverData -= WEBAUDIO_BLOCK_SIZE;
} else {
if (mLeftOverData != INT32_MIN) {
mLeftOverData = INT32_MIN;
aStream->ScheduleCheckForInactive();
// Delete our buffered data now we no longer need it
mBuffer.Reset();
RefPtr<PlayingRefChanged> refchanged =
new PlayingRefChanged(aStream, PlayingRefChanged::RELEASE);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
return;
}
mBuffer.Write(aInput);
// Skip output update if mLastChunks has already been set by
--- a/dom/media/webaudio/IIRFilterNode.cpp
+++ b/dom/media/webaudio/IIRFilterNode.cpp
@@ -51,33 +51,31 @@ public:
// all filter buffer values are zero, so the output will be zero
// as well.
if (allZero) {
mIIRFilters.Clear();
aStream->ScheduleCheckForInactive();
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
return;
}
PodZero(alignedInputBuffer, WEBAUDIO_BLOCK_SIZE);
}
} else if(mIIRFilters.Length() != aInput.ChannelCount()){
if (mIIRFilters.IsEmpty()) {
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
} else {
WebAudioUtils::LogToDeveloperConsole(mWindowID,
"IIRFilterChannelCountChangeWarning");
}
// Adjust the number of filters based on the number of channels
mIIRFilters.SetLength(aInput.ChannelCount());
for (size_t i = 0; i < aInput.ChannelCount(); ++i) {
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
@@ -79,19 +79,17 @@ MediaStreamAudioSourceNode::Init(DOMMedi
MediaStreamGraph* graph = Context()->Graph();
if (NS_WARN_IF(graph != inputStream->Graph())) {
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
mInputStream = aMediaStream;
AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
- mStream =
- AudioNodeExternalInputStream::Create(graph, engine,
- aMediaStream->AbstractMainThread());
+ mStream = AudioNodeExternalInputStream::Create(graph, engine);
mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this));
mInputStream->RegisterTrackListener(this);
AttachToFirstTrack(mInputStream);
}
void
MediaStreamAudioSourceNode::Destroy()
--- a/dom/media/webaudio/PannerNode.cpp
+++ b/dom/media/webaudio/PannerNode.cpp
@@ -203,30 +203,28 @@ public:
} else {
if (mLeftOverData != INT_MIN) {
mLeftOverData = INT_MIN;
aStream->ScheduleCheckForInactive();
mHRTFPanner->reset();
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
return;
}
} else if (mPanningModelFunction == &PannerNodeEngine::HRTFPanningFunction) {
if (mLeftOverData == INT_MIN) {
RefPtr<PlayingRefChangeHandler> refchanged =
new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
- aStream->Graph()->
- DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
- refchanged.forget());
+ aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+ refchanged.forget());
}
mLeftOverData = mHRTFPanner->maxTailFrames();
}
StreamTime tick = mDestination->GraphTimeToStreamTime(aFrom);
(this->*mPanningModelFunction)(aInput, aOutput, tick);
}