Bug 1375562 - Don't use AudiContext::Suspend when freezing a page, as it has a bunch of side effects. r?karlt
MozReview-Commit-ID: DgacqnSL0Ii
--- a/dom/base/nsGlobalWindow.cpp
+++ b/dom/base/nsGlobalWindow.cpp
@@ -12570,18 +12570,17 @@ nsGlobalWindow::Suspend()
mozilla::dom::workers::SuspendWorkersForWindow(AsInner());
SuspendIdleRequests();
mTimeoutManager->Suspend();
// Suspend all of the AudioContexts for this window
for (uint32_t i = 0; i < mAudioContexts.Length(); ++i) {
- ErrorResult dummy;
- RefPtr<Promise> d = mAudioContexts[i]->Suspend(dummy);
+ mAudioContexts[i]->SuspendFromChrome();
}
}
void
nsGlobalWindow::Resume()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(IsInnerWindow());
@@ -12614,18 +12613,17 @@ nsGlobalWindow::Resume()
for (uint32_t i = 0; i < mEnabledSensors.Length(); i++)
ac->AddWindowListener(mEnabledSensors[i], this);
}
EnableGamepadUpdates();
EnableVRUpdates();
// Resume all of the AudioContexts for this window
for (uint32_t i = 0; i < mAudioContexts.Length(); ++i) {
- ErrorResult dummy;
- RefPtr<Promise> d = mAudioContexts[i]->Resume(dummy);
+ mAudioContexts[i]->ResumeFromChrome();
}
mTimeoutManager->Resume();
ResumeIdleRequests();
// Resume all of the workers for this window. We must do this
// after timeouts since workers may have queued events that can trigger
--- a/dom/media/GraphDriver.cpp
+++ b/dom/media/GraphDriver.cpp
@@ -545,22 +545,23 @@ AsyncCubebTask::Run()
}
// The thread will kill itself after a bit
return NS_OK;
}
StreamAndPromiseForOperation::StreamAndPromiseForOperation(MediaStream* aStream,
void* aPromise,
- dom::AudioContextOperation aOperation)
+ dom::AudioContextOperation aOperation,
+ dom::AudioContextStateCallSource aSource)
: mStream(aStream)
, mPromise(aPromise)
, mOperation(aOperation)
+ , mSource(aSource)
{
- // MOZ_ASSERT(aPromise);
}
AudioCallbackDriver::AudioCallbackDriver(MediaStreamGraphImpl* aGraphImpl)
: GraphDriver(aGraphImpl)
, mSampleRate(0)
, mInputChannels(1)
, mIterationDurationMS(MEDIA_GRAPH_TARGET_PERIOD_MS)
, mStarted(false)
@@ -1174,22 +1175,24 @@ bool
AudioCallbackDriver::IsStarted() {
mGraphImpl->GetMonitor().AssertCurrentThreadOwns();
return mStarted;
}
void
AudioCallbackDriver::EnqueueStreamAndPromiseForOperation(MediaStream* aStream,
void* aPromise,
- dom::AudioContextOperation aOperation)
+ dom::AudioContextOperation aOperation,
+ dom::AudioContextStateCallSource aSource)
{
MonitorAutoLock mon(mGraphImpl->GetMonitor());
mPromisesForOperation.AppendElement(StreamAndPromiseForOperation(aStream,
aPromise,
- aOperation));
+ aOperation,
+ aSource));
}
void AudioCallbackDriver::CompleteAudioContextOperations(AsyncCubebOperation aOperation)
{
AutoTArray<StreamAndPromiseForOperation, 1> array;
// We can't lock for the whole function because AudioContextOperationCompleted
// will grab the monitor
@@ -1200,19 +1203,23 @@ void AudioCallbackDriver::CompleteAudioC
for (uint32_t i = 0; i < array.Length(); i++) {
StreamAndPromiseForOperation& s = array[i];
if ((aOperation == AsyncCubebOperation::INIT &&
s.mOperation == dom::AudioContextOperation::Resume) ||
(aOperation == AsyncCubebOperation::SHUTDOWN &&
s.mOperation != dom::AudioContextOperation::Resume)) {
- GraphImpl()->AudioContextOperationCompleted(s.mStream,
- s.mPromise,
- s.mOperation);
+ // If the source of this operation is Content, we need to notify it of its
+ // completion.
+ if (s.mSource == dom::AudioContextStateCallSource::Content) {
+ GraphImpl()->AudioContextOperationCompleted(s.mStream,
+ s.mPromise,
+ s.mOperation);
+ }
array.RemoveElementAt(i);
i--;
}
}
if (!array.IsEmpty()) {
MonitorAutoLock mon(GraphImpl()->GetMonitor());
mPromisesForOperation.AppendElements(array);
--- a/dom/media/GraphDriver.h
+++ b/dom/media/GraphDriver.h
@@ -338,20 +338,22 @@ private:
// Time, in GraphTime, for each iteration
GraphTime mSlice;
};
struct StreamAndPromiseForOperation
{
StreamAndPromiseForOperation(MediaStream* aStream,
void* aPromise,
- dom::AudioContextOperation aOperation);
+ dom::AudioContextOperation aOperation,
+ dom::AudioContextStateCallSource aSource);
RefPtr<MediaStream> mStream;
void* mPromise;
dom::AudioContextOperation mOperation;
+ dom::AudioContextStateCallSource mSource;
};
enum AsyncCubebOperation {
INIT,
SHUTDOWN
};
/**
@@ -434,17 +436,18 @@ public:
AudioCallbackDriver* AsAudioCallbackDriver() override {
return this;
}
/* Enqueue a promise that is going to be resolved when a specific operation
* occurs on the cubeb stream. */
void EnqueueStreamAndPromiseForOperation(MediaStream* aStream,
void* aPromise,
- dom::AudioContextOperation aOperation);
+ dom::AudioContextOperation aOperation,
+ dom::AudioContextStateCallSource aSource);
/**
* Whether the audio callback is processing. This is for asserting only.
*/
bool InCallback();
bool OnThread() override { return !mStarted || InCallback(); }
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -3956,17 +3956,18 @@ MediaStreamGraphImpl::AudioContextOperat
nsCOMPtr<nsIRunnable> event = new dom::StateChangeTask(
aStream->AsAudioNodeStream(), aPromise, state);
mAbstractMainThread->Dispatch(event.forget());
}
void
MediaStreamGraphImpl::ApplyAudioContextOperationImpl(
MediaStream* aDestinationStream, const nsTArray<MediaStream*>& aStreams,
- AudioContextOperation aOperation, void* aPromise)
+ AudioContextOperation aOperation, void* aPromise,
+ AudioContextStateCallSource aSource)
{
MOZ_ASSERT(CurrentDriver()->OnThread());
SuspendOrResumeStreams(aOperation, aStreams);
bool switching = false;
GraphDriver* nextDriver = nullptr;
{
@@ -3990,17 +3991,17 @@ MediaStreamGraphImpl::ApplyAudioContextO
MOZ_ASSERT(nextDriver->AsAudioCallbackDriver());
driver = nextDriver->AsAudioCallbackDriver();
} else {
driver = new AudioCallbackDriver(this);
MonitorAutoLock lock(mMonitor);
CurrentDriver()->SwitchAtNextIteration(driver);
}
driver->EnqueueStreamAndPromiseForOperation(aDestinationStream,
- aPromise, aOperation);
+ aPromise, aOperation, aSource);
} else {
// We are resuming a context, but we are already using an
// AudioCallbackDriver, we can resolve the promise now.
AudioContextOperationCompleted(aDestinationStream, aPromise, aOperation);
}
}
// Close, suspend: check if we are going to switch to a
// SystemAudioCallbackDriver, and pass the promise to the AudioCallbackDriver
@@ -4009,84 +4010,88 @@ MediaStreamGraphImpl::ApplyAudioContextO
// here as well so we don't have to store the Promise(s) on the Graph.
if (aOperation != AudioContextOperation::Resume) {
bool shouldAEC = false;
bool audioTrackPresent = AudioTrackPresent(shouldAEC);
if (!audioTrackPresent && CurrentDriver()->AsAudioCallbackDriver()) {
CurrentDriver()->AsAudioCallbackDriver()->
EnqueueStreamAndPromiseForOperation(aDestinationStream, aPromise,
- aOperation);
+ aOperation, aSource);
SystemClockDriver* driver;
if (nextDriver) {
MOZ_ASSERT(!nextDriver->AsAudioCallbackDriver());
} else {
driver = new SystemClockDriver(this);
MonitorAutoLock lock(mMonitor);
CurrentDriver()->SwitchAtNextIteration(driver);
}
// We are closing or suspending an AudioContext, but we just got resumed.
// Queue the operation on the next driver so that the ordering is
// preserved.
} else if (!audioTrackPresent && switching) {
MOZ_ASSERT(nextDriver->AsAudioCallbackDriver());
nextDriver->AsAudioCallbackDriver()->
EnqueueStreamAndPromiseForOperation(aDestinationStream, aPromise,
- aOperation);
+ aOperation, aSource);
} else {
// We are closing or suspending an AudioContext, but something else is
// using the audio stream, we can resolve the promise now.
AudioContextOperationCompleted(aDestinationStream, aPromise, aOperation);
}
}
}
void
MediaStreamGraph::ApplyAudioContextOperation(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
AudioContextOperation aOperation,
- void* aPromise)
+ void* aPromise,
+ AudioContextStateCallSource aSource)
{
class AudioContextOperationControlMessage : public ControlMessage
{
public:
AudioContextOperationControlMessage(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
AudioContextOperation aOperation,
- void* aPromise)
+ void* aPromise,
+ AudioContextStateCallSource aSource)
: ControlMessage(aDestinationStream)
, mStreams(aStreams)
, mAudioContextOperation(aOperation)
, mPromise(aPromise)
+ , mSource(aSource)
{
}
void Run() override
{
mStream->GraphImpl()->ApplyAudioContextOperationImpl(mStream,
- mStreams, mAudioContextOperation, mPromise);
+ mStreams, mAudioContextOperation, mPromise, mSource);
}
void RunDuringShutdown() override
{
MOZ_ASSERT(mAudioContextOperation == AudioContextOperation::Close,
"We should be reviving the graph?");
}
private:
// We don't need strong references here for the same reason ControlMessage
// doesn't.
nsTArray<MediaStream*> mStreams;
AudioContextOperation mAudioContextOperation;
void* mPromise;
+ AudioContextStateCallSource mSource;
};
MediaStreamGraphImpl* graphImpl = static_cast<MediaStreamGraphImpl*>(this);
graphImpl->AppendMessage(
MakeUnique<AudioContextOperationControlMessage>(aDestinationStream, aStreams,
- aOperation, aPromise));
+ aOperation, aPromise, aSource));
}
bool
MediaStreamGraph::IsNonRealtime() const
{
return !static_cast<const MediaStreamGraphImpl*>(this)->mRealtime;
}
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -33,16 +33,17 @@ class nsAutoRefTraits<SpeexResamplerStat
};
namespace mozilla {
extern LazyLogModule gMediaStreamGraphLog;
namespace dom {
enum class AudioContextOperation;
+ enum class AudioContextStateCallSource;
}
namespace media {
template<typename V, typename E> class Pledge;
}
/*
* MediaStreamGraph is a framework for synchronized audio/video processing
@@ -1336,17 +1337,18 @@ public:
* This can possibly pause the graph thread, releasing system resources, if
* all streams have been suspended/closed.
*
* When the operation is complete, aPromise is resolved.
*/
void ApplyAudioContextOperation(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aState,
- void* aPromise);
+ void* aPromise,
+ dom::AudioContextStateCallSource aSource);
bool IsNonRealtime() const;
/**
* Start processing non-realtime for a specific number of ticks.
*/
void StartNonRealtimeProcessing(uint32_t aTicksToProcess);
/**
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -330,17 +330,18 @@ public:
/**
* Apply and AudioContext operation (suspend/resume/closed), on the graph
* thread.
*/
void ApplyAudioContextOperationImpl(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aOperation,
- void* aPromise);
+ void* aPromise,
+ dom::AudioContextStateCallSource aSource);
/**
* Increment suspend count on aStream and move it to mSuspendedStreams if
* necessary.
*/
void IncrementSuspendCount(MediaStream* aStream);
/**
* Increment suspend count on aStream and move it to mStreams if
--- a/dom/media/webaudio/AudioContext.cpp
+++ b/dom/media/webaudio/AudioContext.cpp
@@ -824,20 +824,20 @@ AudioContext::OnStateChanged(void* aProm
static_cast<int>(mAudioContextState), static_cast<int>(aNewState));
MOZ_ASSERT(false);
}
#endif // DEBUG
#endif // XP_MACOSX
#endif // WIN32
- MOZ_ASSERT(
- mIsOffline || aPromise || aNewState == AudioContextState::Running,
- "We should have a promise here if this is a real-time AudioContext."
- "Or this is the first time we switch to \"running\".");
+ //MOZ_ASSERT(
+ //mIsOffline || aPromise || aNewState == AudioContextState::Running,
+ //"We should have a promise here if this is a real-time AudioContext."
+ //"Or this is the first time we switch to \"running\".");
if (aPromise) {
Promise* promise = reinterpret_cast<Promise*>(aPromise);
// It is possible for the promise to have been removed from
// mPromiseGripArray if the cycle collector has severed our connections. DO
// NOT dereference the promise pointer in that case since it may point to
// already freed memory.
if (mPromiseGripArray.Contains(promise)) {
@@ -863,16 +863,48 @@ AudioContext::GetAllStreams() const
MediaStream* s = iter.Get()->GetKey()->GetStream();
if (s) {
streams.AppendElement(s);
}
}
return streams;
}
+void
+AudioContext::SuspendFromChrome()
+{
+ // Already suspended, don't bother suspending again.
+ if (mAudioContextState == AudioContextState::Suspended ||
+ mIsOffline) {
+ return;
+ }
+ DestinationStream()->Suspend();
+ nsTArray<MediaStream*> streams = GetAllStreams();
+ Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
+ streams,
+ AudioContextOperation::Suspend, nullptr,
+ AudioContextStateCallSource::Chrome);
+}
+
+void
+AudioContext::ResumeFromChrome()
+{
+ if (mAudioContextState == AudioContextState::Suspended ||
+ mSuspendCalled ||
+ mIsOffline) {
+ return;
+ }
+ DestinationStream()->Resume();
+ nsTArray<MediaStream*> streams = GetAllStreams();
+ Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
+ streams,
+ AudioContextOperation::Resume, nullptr,
+ AudioContextStateCallSource::Chrome);
+}
+
already_AddRefed<Promise>
AudioContext::Suspend(ErrorResult& aRv)
{
nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
RefPtr<Promise> promise;
promise = Promise::Create(parentObject, aRv);
if (aRv.Failed()) {
return nullptr;
@@ -897,17 +929,18 @@ AudioContext::Suspend(ErrorResult& aRv)
// so don't suspend them again (since suspend(); suspend(); resume(); should
// cancel both suspends). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (!mSuspendCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
- AudioContextOperation::Suspend, promise);
+ AudioContextOperation::Suspend, promise,
+ AudioContextStateCallSource::Content);
mSuspendCalled = true;
return promise.forget();
}
already_AddRefed<Promise>
AudioContext::Resume(ErrorResult& aRv)
@@ -938,17 +971,18 @@ AudioContext::Resume(ErrorResult& aRv)
// be OK). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (mSuspendCalled) {
streams = GetAllStreams();
}
mPromiseGripArray.AppendElement(promise);
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
- AudioContextOperation::Resume, promise);
+ AudioContextOperation::Resume, promise,
+ AudioContextStateCallSource::Content);
mSuspendCalled = false;
return promise.forget();
}
already_AddRefed<Promise>
AudioContext::Close(ErrorResult& aRv)
@@ -983,17 +1017,18 @@ AudioContext::Close(ErrorResult& aRv)
nsTArray<MediaStream*> streams;
// If mSuspendCalled or mCloseCalled are true then we already suspended
// all our streams, so don't suspend them again. But we still need to do
// ApplyAudioContextOperation to ensure our new promise is resolved.
if (!mSuspendCalled && !mCloseCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(), streams,
- AudioContextOperation::Close, promise);
+ AudioContextOperation::Close, promise,
+ AudioContextStateCallSource::Content);
}
mCloseCalled = true;
return promise.forget();
}
void
AudioContext::RegisterNode(AudioNode* aNode)
--- a/dom/media/webaudio/AudioContext.h
+++ b/dom/media/webaudio/AudioContext.h
@@ -111,16 +111,26 @@ private:
RefPtr<AudioContext> mAudioContext;
void* mPromise;
RefPtr<AudioNodeStream> mAudioNodeStream;
AudioContextState mNewState;
};
enum class AudioContextOperation { Suspend, Resume, Close };
+// When suspending or resuming an AudioContext, Content operation notify the
+// main thread, so that the Promise is resolved, the state is modified, and the
+// statechanged event is sent. Chrome operation don't go back to the main
+// thread.
+enum class AudioContextStateCallSource
+{
+ Content,
+ Chrome
+};
+
class AudioContext final : public DOMEventTargetHelper,
public nsIMemoryReporter
{
AudioContext(nsPIDOMWindowInner* aParentWindow,
bool aIsOffline,
AudioChannel aChannel,
uint32_t aNumberOfChannels = 0,
uint32_t aLength = 0,
@@ -178,28 +188,34 @@ public:
double CurrentTime() const;
AudioListener* Listener();
AudioContextState State() const { return mAudioContextState; }
bool IsRunning() const;
+
// Those three methods return a promise to content, that is resolved when an
// (possibly long) operation is completed on the MSG (and possibly other)
// thread(s). To avoid having to match the calls and asychronous result when
// the operation is completed, we keep a reference to the promises on the main
// thread, and then send the promises pointers down the MSG thread, as a void*
// (to make it very clear that the pointer is to merely be treated as an ID).
// When back on the main thread, we can resolve or reject the promise, by
// casting it back to a `Promise*` while asserting we're back on the main
// thread and removing the reference we added.
already_AddRefed<Promise> Suspend(ErrorResult& aRv);
already_AddRefed<Promise> Resume(ErrorResult& aRv);
already_AddRefed<Promise> Close(ErrorResult& aRv);
+ // This suspends or resume an AudioContext without having to deal with
+ // Promises or changing the state, etc.
+ void SuspendFromChrome();
+ void ResumeFromChrome();
+
IMPL_EVENT_HANDLER(statechange)
already_AddRefed<AudioBufferSourceNode> CreateBufferSource(ErrorResult& aRv);
already_AddRefed<ConstantSourceNode> CreateConstantSource(ErrorResult& aRv);
already_AddRefed<AudioBuffer>
CreateBuffer(uint32_t aNumberOfChannels, uint32_t aLength, float aSampleRate,
@@ -333,17 +349,23 @@ private:
private:
// Each AudioContext has an id, that is passed down the MediaStreams that
// back the AudioNodes, so we can easily compute the set of all the
// MediaStreams for a given context, on the MediasStreamGraph side.
const AudioContextId mId;
// Note that it's important for mSampleRate to be initialized before
// mDestination, as mDestination's constructor needs to access it!
const float mSampleRate;
+ // The state of this AudioContext, as seen and set by content. This is
+ // modified by calling suspend/resume/close on an AudioContext.
AudioContextState mAudioContextState;
+ // The state of this AudioContext, from content. This is set by Gecko itself
+ // when it want to suspend/resume a page. This takes precedence over the
+ // "content" state above.
+ AudioContextState mChromeAudioContextState;
RefPtr<AudioDestinationNode> mDestination;
RefPtr<AudioListener> mListener;
nsTArray<UniquePtr<WebAudioDecodeJob> > mDecodeJobs;
// This array is used to keep the suspend/resume/close promises alive until
// they are resolved, so we can safely pass them accross threads.
nsTArray<RefPtr<Promise>> mPromiseGripArray;
// See RegisterActiveNode. These will keep the AudioContext alive while it
// is rendering and the window remains alive.