--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -1253,16 +1253,32 @@ void HTMLMediaElement::NotifyMediaTrackE
MOZ_ASSERT((aTrack->AsAudioTrack() && aTrack->AsAudioTrack()->Enabled()) ||
(aTrack->AsVideoTrack() && aTrack->AsVideoTrack()->Selected()));
if (aTrack->AsAudioTrack()) {
SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
} else if (aTrack->AsVideoTrack()) {
mDisableVideo = false;
}
+
+ if (mReadyState == HAVE_NOTHING) {
+ // No MediaStreamTracks are captured until we have metadata.
+ return;
+ }
+ if (aTrack->AsVideoTrack() && !IsVideo()) {
+ // An audio element does not capture video tracks.
+ return;
+ }
+ for (OutputMediaStream& ms : mOutputStreams) {
+ if (aTrack->AsVideoTrack() && ms.mCapturingAudioOnly) {
+ // If the output stream is for audio only we ignore video tracks.
+ continue;
+ }
+ AddCaptureMediaTrackToOutputStream(aTrack, ms);
+ }
}
void HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack)
{
MOZ_ASSERT(aTrack);
if (!aTrack) {
return;
}
@@ -1289,16 +1305,36 @@ void HTMLMediaElement::NotifyMediaTrackD
if (shouldMute) {
SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
}
} else if (aTrack->AsVideoTrack()) {
if (VideoTracks()->SelectedIndex() == -1) {
mDisableVideo = false;
}
}
+
+ for (OutputMediaStream& ms : mOutputStreams) {
+ if (ms.mCapturingDecoder) {
+ MOZ_ASSERT(!ms.mCapturingMediaStream);
+ continue;
+ }
+ for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) {
+ if (ms.mTrackPorts[i].first() == aTrack->GetId()) {
+ ms.mTrackPorts[i].second()->Destroy();
+ ms.mTrackPorts.RemoveElementAt(i);
+ break;
+ }
+ }
+#ifdef DEBUG
+ for (auto pair : ms.mTrackPorts) {
+ MOZ_ASSERT(pair.first() != aTrack->GetId(),
+ "The same MediaTrack was forwarded to the output stream more than once. This shouldn't happen.");
+ }
+#endif
+ }
}
void HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream)
{
if (!mSrcStream || mSrcStream != aStream) {
return;
}
@@ -2121,16 +2157,91 @@ NS_IMETHODIMP HTMLMediaElement::SetMuted
} else {
SetMutedInternal(mMuted & ~MUTED_BY_CONTENT);
}
DispatchAsyncEvent(NS_LITERAL_STRING("volumechange"));
return NS_OK;
}
+class HTMLMediaElement::StreamCaptureTrackSource :
+ public MediaStreamTrackSource,
+ public MediaStreamTrackSource::Sink
+{
+public:
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource,
+ MediaStreamTrackSource)
+
+ explicit StreamCaptureTrackSource(MediaStreamTrackSource* aCapturedTrackSource)
+ : MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
+ true,
+ nsString())
+ , mCapturedTrackSource(aCapturedTrackSource)
+ {
+ mCapturedTrackSource->RegisterSink(this);
+ }
+
+ void Destroy() override
+ {
+ MOZ_ASSERT(mCapturedTrackSource);
+ if (mCapturedTrackSource) {
+ mCapturedTrackSource->UnregisterSink(this);
+ }
+ }
+
+ MediaSourceEnum GetMediaSource() const override
+ {
+ return MediaSourceEnum::Other;
+ }
+
+ CORSMode GetCORSMode() const override
+ {
+ return mCapturedTrackSource->GetCORSMode();
+ }
+
+ already_AddRefed<PledgeVoid>
+ ApplyConstraints(nsPIDOMWindowInner* aWindow,
+ const dom::MediaTrackConstraints& aConstraints) override
+ {
+ RefPtr<PledgeVoid> p = new PledgeVoid();
+ p->Reject(new dom::MediaStreamError(aWindow,
+ NS_LITERAL_STRING("OverconstrainedError"),
+ NS_LITERAL_STRING("")));
+ return p.forget();
+ }
+
+ void Stop() override
+ {
+ NS_ERROR("We're reporting remote=true to not be stoppable. "
+ "Stop() should not be called.");
+ }
+
+ void PrincipalChanged() override
+ {
+ mPrincipal = mCapturedTrackSource->GetPrincipal();
+ MediaStreamTrackSource::PrincipalChanged();
+ }
+
+private:
+ virtual ~StreamCaptureTrackSource() {}
+
+ RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
+};
+
+NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
+ MediaStreamTrackSource)
+NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
+ MediaStreamTrackSource)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource)
+NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
+NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
+ MediaStreamTrackSource,
+ mCapturedTrackSource)
+
class HTMLMediaElement::DecoderCaptureTrackSource :
public MediaStreamTrackSource,
public DecoderPrincipalChangeObserver
{
public:
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecoderCaptureTrackSource,
MediaStreamTrackSource)
@@ -2215,16 +2326,21 @@ public:
MediaStreamTrackSourceGetter)
explicit CaptureStreamTrackSourceGetter(HTMLMediaElement* aElement)
: mElement(aElement) {}
already_AddRefed<dom::MediaStreamTrackSource>
GetMediaStreamTrackSource(TrackID aInputTrackID) override
{
+ if (mElement && mElement->mSrcStream) {
+ NS_ERROR("Captured media element playing a stream adds tracks explicitly on main thread.");
+ return nullptr;
+ }
+
// We can return a new source each time here, even for different streams,
// since the sources don't keep any internal state and all of them call
// through to the same HTMLMediaElement.
// If this changes (after implementing Stop()?) we'll have to ensure we
// return the same source for all requests to the same TrackID, and only
// have one getter.
return do_AddRef(new DecoderCaptureTrackSource(mElement));
}
@@ -2240,18 +2356,112 @@ NS_IMPL_ADDREF_INHERITED(HTMLMediaElemen
NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
MediaStreamTrackSourceGetter)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter)
NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSourceGetter)
NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
MediaStreamTrackSourceGetter,
mElement)
+void
+HTMLMediaElement::SetCapturedOutputStreamsEnabled(bool aEnabled) {
+ for (OutputMediaStream& ms : mOutputStreams) {
+ if (ms.mCapturingDecoder) {
+ MOZ_ASSERT(!ms.mCapturingMediaStream);
+ continue;
+ }
+ for (auto pair : ms.mTrackPorts) {
+ MediaStream* outputSource = ms.mStream->GetInputStream();
+ if (!outputSource) {
+ NS_ERROR("No output source stream");
+ return;
+ }
+
+ TrackID id = pair.second()->GetDestinationTrackId();
+ outputSource->SetTrackEnabled(id, aEnabled);
+
+ LOG(LogLevel::Debug,
+ ("%s track %d for captured MediaStream %p",
+ aEnabled ? "Enabled" : "Disabled", id, ms.mStream.get()));
+ }
+ }
+}
+
+void
+HTMLMediaElement::AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
+ OutputMediaStream& aOutputStream)
+{
+ if (aOutputStream.mCapturingDecoder) {
+ MOZ_ASSERT(!aOutputStream.mCapturingMediaStream);
+ return;
+ }
+ aOutputStream.mCapturingMediaStream = true;
+
+ MediaStream* outputSource = aOutputStream.mStream->GetInputStream();
+ if (!outputSource) {
+ NS_ERROR("No output source stream");
+ return;
+ }
+
+ ProcessedMediaStream* processedOutputSource =
+ outputSource->AsProcessedStream();
+ if (!processedOutputSource) {
+ NS_ERROR("Input stream not a ProcessedMediaStream");
+ return;
+ }
+
+ if (!aTrack) {
+ MOZ_ASSERT(false, "Bad MediaTrack");
+ return;
+ }
+
+ MediaStreamTrack* inputTrack = mSrcStream->GetTrackById(aTrack->GetId());
+ MOZ_ASSERT(inputTrack);
+ if (!inputTrack) {
+ NS_ERROR("Input track not found in source stream");
+ return;
+ }
+
+#if DEBUG
+ for (auto pair : aOutputStream.mTrackPorts) {
+ MOZ_ASSERT(pair.first() != aTrack->GetId(),
+ "Captured track already captured to output stream");
+ }
+#endif
+
+ // Track is muted initially, so we don't leak data if it's added while paused
+ // and an MSG iteration passes before the mute comes into effect.
+ TrackID destinationTrackID = aOutputStream.mNextAvailableTrackID++;
+ processedOutputSource->SetTrackEnabled(destinationTrackID, false);
+ RefPtr<MediaInputPort> port =
+ inputTrack->ForwardTrackContentsTo(processedOutputSource,
+ destinationTrackID);
+ RefPtr<MediaStreamTrackSource> source =
+ new StreamCaptureTrackSource(&inputTrack->GetSource());
+
+ aOutputStream.mStream->CreateDOMTrack(
+ destinationTrackID,
+ inputTrack->AsAudioStreamTrack() ? MediaSegment::AUDIO
+ : MediaSegment::VIDEO,
+ source, MediaTrackConstraints(), true);
+ LOG(LogLevel::Debug,
+ ("Created %s track %d for captured MediaStream %p",
+ inputTrack->AsAudioStreamTrack() ? "audio" : "video",
+ destinationTrackID, aOutputStream.mStream.get()));
+ Pair<nsString, RefPtr<MediaInputPort>> p(aTrack->GetId(), port);
+ aOutputStream.mTrackPorts.AppendElement(Move(p));
+
+ if (mSrcStreamIsPlaying) {
+ processedOutputSource->SetTrackEnabled(destinationTrackID, true);
+ }
+}
+
already_AddRefed<DOMMediaStream>
HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
+ bool aCaptureAudio,
MediaStreamGraph* aGraph)
{
nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
if (!window) {
return nullptr;
}
#ifdef MOZ_EME
if (ContainsRestrictedContent()) {
@@ -2270,44 +2480,118 @@ HTMLMediaElement::CaptureStreamInternal(
aGraph != mOutputStreams[0].mStream->GetInputStream()->Graph()) {
return nullptr;
}
OutputMediaStream* out = mOutputStreams.AppendElement();
MediaStreamTrackSourceGetter* getter = new CaptureStreamTrackSourceGetter(this);
out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph, getter);
out->mFinishWhenEnded = aFinishWhenEnded;
-
- mAudioCaptured = true;
+ out->mCapturingAudioOnly = aCaptureAudio;
+
+ if (aCaptureAudio) {
+ if (mSrcStream) {
+ // We don't support applying volume and mute to the captured stream, when
+ // capturing a MediaStream.
+ nsContentUtils::ReportToConsole(nsIScriptError::errorFlag,
+ NS_LITERAL_CSTRING("Media"),
+ OwnerDoc(),
+ nsContentUtils::eDOM_PROPERTIES,
+ "MediaElementAudioCaptureOfMediaStreamError");
+ return nullptr;
+ }
+
+ // mAudioCaptured tells the user that the audio played by this media element
+ // is being routed to the captureStreams *instead* of being played to
+ // speakers.
+ mAudioCaptured = true;
+ }
+
+ if (mReadyState == HAVE_NOTHING) {
+ // Do not expose the tracks directly before we have metadata.
+ RefPtr<DOMMediaStream> result = out->mStream;
+ return result.forget();
+ }
+
if (mDecoder) {
+ out->mCapturingDecoder = true;
mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
aFinishWhenEnded);
- if (mReadyState >= HAVE_METADATA) {
- // Expose the tracks to JS directly.
- if (HasAudio()) {
- TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
- RefPtr<MediaStreamTrackSource> trackSource =
- getter->GetMediaStreamTrackSource(audioTrackId);
- out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO,
- trackSource);
+ if (HasAudio()) {
+ TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
+ RefPtr<MediaStreamTrackSource> trackSource =
+ getter->GetMediaStreamTrackSource(audioTrackId);
+ out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO,
+ trackSource, MediaTrackConstraints(), true);
+ LOG(LogLevel::Debug,
+ ("Created audio track %d for captured decoder", audioTrackId));
+ }
+ if (IsVideo() && HasVideo() && !out->mCapturingAudioOnly) {
+ TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
+ RefPtr<MediaStreamTrackSource> trackSource =
+ getter->GetMediaStreamTrackSource(videoTrackId);
+ out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
+ trackSource, MediaTrackConstraints(), true);
+ LOG(LogLevel::Debug,
+ ("Created video track %d for captured decoder", videoTrackId));
+ }
+ }
+
+ if (mSrcStream) {
+ out->mCapturingMediaStream = true;
+ MediaStream* inputStream = out->mStream->GetInputStream();
+ if (!inputStream) {
+ NS_ERROR("No input stream");
+ RefPtr<DOMMediaStream> result = out->mStream;
+ return result.forget();
+ }
+
+ ProcessedMediaStream* processedInputStream =
+ inputStream->AsProcessedStream();
+ if (!processedInputStream) {
+ NS_ERROR("Input stream not a ProcessedMediaStream");
+ RefPtr<DOMMediaStream> result = out->mStream;
+ return result.forget();
+ }
+
+ for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
+ AudioTrack* t = (*AudioTracks())[i];
+ if (t->Enabled()) {
+ AddCaptureMediaTrackToOutputStream(t, *out);
}
- if (HasVideo()) {
- TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
- RefPtr<MediaStreamTrackSource> trackSource =
- getter->GetMediaStreamTrackSource(videoTrackId);
- out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
- trackSource);
+ }
+ if (IsVideo() && !out->mCapturingAudioOnly) {
+ // Only add video tracks if we're a video element and the output stream
+ // wants video.
+ for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
+ VideoTrack* t = (*VideoTracks())[i];
+ if (t->Selected()) {
+ AddCaptureMediaTrackToOutputStream(t, *out);
+ }
}
}
}
RefPtr<DOMMediaStream> result = out->mStream;
return result.forget();
}
already_AddRefed<DOMMediaStream>
+HTMLMediaElement::CaptureAudio(ErrorResult& aRv,
+ MediaStreamGraph* aGraph)
+{
+ RefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, aGraph);
+ if (!stream) {
+ aRv.Throw(NS_ERROR_FAILURE);
+ return nullptr;
+ }
+
+ return stream.forget();
+}
+
+already_AddRefed<DOMMediaStream>
HTMLMediaElement::MozCaptureStream(ErrorResult& aRv,
MediaStreamGraph* aGraph)
{
RefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, aGraph);
if (!stream) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
@@ -2843,16 +3127,30 @@ HTMLMediaElement::WakeLockRelease()
if (mWakeLock) {
ErrorResult rv;
mWakeLock->Unlock(rv);
rv.SuppressException();
mWakeLock = nullptr;
}
}
+HTMLMediaElement::OutputMediaStream::OutputMediaStream()
+ : mFinishWhenEnded(false)
+ , mCapturingAudioOnly(false)
+ , mCapturingDecoder(false)
+ , mCapturingMediaStream(false)
+ , mNextAvailableTrackID(1) {}
+
+HTMLMediaElement::OutputMediaStream::~OutputMediaStream()
+{
+ for (auto pair : mTrackPorts) {
+ pair.second()->Destroy();
+ }
+}
+
bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
nsIAtom* aAttribute,
const nsAString& aValue,
nsAttrValue& aResult)
{
// Mappings from 'preload' attribute strings to an enumeration.
static const nsAttrValue::EnumTable kPreloadTable[] = {
{ "", HTMLMediaElement::PRELOAD_ATTR_EMPTY },
@@ -3409,20 +3707,25 @@ nsresult HTMLMediaElement::FinishDecoder
nsresult rv = aDecoder->Load(aListener);
if (NS_FAILED(rv)) {
ShutdownDecoder();
LOG(LogLevel::Debug, ("%p Failed to load for decoder %p", this, aDecoder));
return rv;
}
- for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
- OutputMediaStream* ms = &mOutputStreams[i];
- aDecoder->AddOutputStream(ms->mStream->GetInputStream()->AsProcessedStream(),
- ms->mFinishWhenEnded);
+ for (OutputMediaStream& ms : mOutputStreams) {
+ if (ms.mCapturingMediaStream) {
+ MOZ_ASSERT(!ms.mCapturingDecoder);
+ continue;
+ }
+
+ ms.mCapturingDecoder = true;
+ aDecoder->AddOutputStream(ms.mStream->GetInputStream()->AsProcessedStream(),
+ ms.mFinishWhenEnded);
}
#ifdef MOZ_EME
if (mMediaKeys) {
if (mMediaKeys->GetCDMProxy()) {
mDecoder->SetCDMProxy(mMediaKeys->GetCDMProxy());
} else {
// CDM must have crashed.
@@ -3659,27 +3962,31 @@ void HTMLMediaElement::UpdateSrcMediaStr
stream->AddAudioOutput(this);
SetVolumeInternal();
VideoFrameContainer* container = GetVideoFrameContainer();
if (container) {
stream->AddVideoOutput(container);
}
+
+ SetCapturedOutputStreamsEnabled(true); // Mute
} else {
if (stream) {
mSrcStreamPausedCurrentTime = CurrentTime();
stream->RemoveListener(mMediaStreamListener);
stream->RemoveAudioOutput(this);
VideoFrameContainer* container = GetVideoFrameContainer();
if (container) {
stream->RemoveVideoOutput(container);
}
+
+ SetCapturedOutputStreamsEnabled(false); // Unmute
}
// If stream is null, then DOMMediaStream::Destroy must have been
// called and that will remove all listeners/outputs.
mWatchManager.Unwatch(*mMediaStreamListener,
&HTMLMediaElement::UpdateReadyStateInternal);
mMediaStreamListener->Forget();
@@ -3743,30 +4050,37 @@ void HTMLMediaElement::EndSrcMediaStream
}
mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener);
mMediaStreamTrackListener = nullptr;
mSrcStream->RemovePrincipalChangeObserver(this);
mSrcStreamVideoPrincipal = nullptr;
+ for (OutputMediaStream& ms : mOutputStreams) {
+ for (auto pair : ms.mTrackPorts) {
+ pair.second()->Destroy();
+ }
+ ms.mTrackPorts.Clear();
+ }
+
mSrcStream = nullptr;
}
static already_AddRefed<AudioTrack>
CreateAudioTrack(AudioStreamTrack* aStreamTrack)
{
nsAutoString id;
nsAutoString label;
aStreamTrack->GetId(id);
aStreamTrack->GetLabel(label);
return MediaTrackList::CreateAudioTrack(id, NS_LITERAL_STRING("main"),
label, EmptyString(),
- aStreamTrack->Enabled());
+ false);
}
static already_AddRefed<VideoTrack>
CreateVideoTrack(VideoStreamTrack* aStreamTrack)
{
nsAutoString id;
nsAutoString label;
aStreamTrack->GetId(id);
@@ -3786,36 +4100,40 @@ void HTMLMediaElement::ConstructMediaTra
for (const RefPtr<MediaStreamTrack>& track : tracks) {
if (track->Ended()) {
continue;
}
if (AudioStreamTrack* t = track->AsAudioStreamTrack()) {
RefPtr<AudioTrack> audioTrack = CreateAudioTrack(t);
AudioTracks()->AddTrack(audioTrack);
+ audioTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
} else if (VideoStreamTrack* t = track->AsVideoStreamTrack()) {
+ if (!IsVideo()) {
+ continue;
+ }
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
VideoTracks()->AddTrack(videoTrack);
firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0)
? (VideoTracks()->Length() - 1)
: firstEnabledVideo;
}
}
- if (VideoTracks()->Length() > 0) {
+ if (firstEnabledVideo != -1) {
// If media resource does not indicate a particular set of video tracks to
// enable, the one that is listed first in the element's videoTracks object
// must be selected.
- int index = firstEnabledVideo >= 0 ? firstEnabledVideo : 0;
- (*VideoTracks())[index]->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
- VideoTrack* track = (*VideoTracks())[index];
- VideoStreamTrack* streamTrack = track->GetVideoStreamTrack();
- mMediaStreamSizeListener = new StreamSizeListener(this);
- streamTrack->AddDirectListener(mMediaStreamSizeListener);
- mSelectedVideoStreamTrack = streamTrack;
+ if (VideoTrack* track = (*VideoTracks())[firstEnabledVideo]) {
+ track->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
+ VideoStreamTrack* streamTrack = track->GetVideoStreamTrack();
+ mMediaStreamSizeListener = new StreamSizeListener(this);
+ streamTrack->AddDirectListener(mMediaStreamSizeListener);
+ mSelectedVideoStreamTrack = streamTrack;
+ }
}
}
void
HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aTrack)
{
MOZ_ASSERT(aTrack);
@@ -3825,31 +4143,33 @@ HTMLMediaElement::NotifyMediaStreamTrack
LOG(LogLevel::Debug, ("%p, Adding MediaTrack with id %s",
this, NS_ConvertUTF16toUTF8(id).get()));
#endif
if (AudioStreamTrack* t = aTrack->AsAudioStreamTrack()) {
RefPtr<AudioTrack> audioTrack = CreateAudioTrack(t);
AudioTracks()->AddTrack(audioTrack);
+ audioTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
} else if (VideoStreamTrack* t = aTrack->AsVideoStreamTrack()) {
// TODO: Fix this per the spec on bug 1273443.
- int32_t selectedIndex = VideoTracks()->SelectedIndex();
+ if (!IsVideo()) {
+ return;
+ }
RefPtr<VideoTrack> videoTrack = CreateVideoTrack(t);
VideoTracks()->AddTrack(videoTrack);
// New MediaStreamTrack added, set the new added video track as selected
// video track when there is no selected track.
- if (selectedIndex == -1) {
+ if (VideoTracks()->SelectedIndex() == -1) {
MOZ_ASSERT(!mSelectedVideoStreamTrack);
videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
mMediaStreamSizeListener = new StreamSizeListener(this);
t->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = t;
}
-
}
}
void
HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack)
{
MOZ_ASSERT(aTrack);
@@ -3983,16 +4303,38 @@ void HTMLMediaElement::MetadataLoaded(co
// We are a video element playing video so update the screen wakelock
NotifyOwnerDocumentActivityChangedInternal();
}
if (mDefaultPlaybackStartPosition != 0.0) {
SetCurrentTime(mDefaultPlaybackStartPosition);
mDefaultPlaybackStartPosition = 0.0;
}
+
+ if (!mSrcStream) {
+ return;
+ }
+ for (OutputMediaStream& ms : mOutputStreams) {
+ for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
+ AudioTrack* t = (*AudioTracks())[i];
+ if (t->Enabled()) {
+ AddCaptureMediaTrackToOutputStream(t, ms);
+ }
+ }
+ if (IsVideo() && !ms.mCapturingAudioOnly) {
+ // Only add video tracks if we're a video element and the output stream
+ // wants video.
+ for (size_t i = 0; i < VideoTracks()->Length(); ++i) {
+ VideoTrack* t = (*VideoTracks())[i];
+ if (t->Selected()) {
+ AddCaptureMediaTrackToOutputStream(t, ms);
+ }
+ }
+ }
+ }
}
void HTMLMediaElement::FirstFrameLoaded()
{
NS_ASSERTION(!mSuspendedAfterFirstFrame, "Should not have already suspended");
if (!mFirstFrameLoaded) {
mFirstFrameLoaded = true;
@@ -4087,16 +4429,18 @@ void HTMLMediaElement::PlaybackEnded()
AddRemoveSelfReference();
NS_ASSERTION(!mDecoder || mDecoder->IsEndedOrShutdown(),
"Decoder fired ended, but not in ended state");
// Discard all output streams that have finished now.
for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
if (mOutputStreams[i].mFinishWhenEnded) {
+ LOG(LogLevel::Debug, ("Playback ended. Removing output stream %p",
+ mOutputStreams[i].mStream.get()));
mOutputStreams.RemoveElementAt(i);
}
}
if (mSrcStream || (mDecoder && mDecoder->IsInfinite())) {
LOG(LogLevel::Debug, ("%p, got duration by reaching the end of the resource", this));
DispatchAsyncEvent(NS_LITERAL_STRING("durationchange"));
}
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -311,16 +311,17 @@ public:
/**
* Remove an added DecoderPrincipalChangeObserver from this media element.
*
* Returns true if it was successfully removed.
*/
bool RemoveDecoderPrincipalChangeObserver(DecoderPrincipalChangeObserver* aObserver);
+ class StreamCaptureTrackSource;
class DecoderCaptureTrackSource;
class CaptureStreamTrackSourceGetter;
// Update the visual size of the media. Called from the decoder on the
// main thread when/if the size changes.
void UpdateMediaSize(const nsIntSize& aSize);
// Like UpdateMediaSize, but only updates the size if no size has yet
// been set.
@@ -651,16 +652,19 @@ public:
bool ContainsRestrictedContent();
#endif // MOZ_EME
bool MozAutoplayEnabled() const
{
return mAutoplayEnabled;
}
+ already_AddRefed<DOMMediaStream> CaptureAudio(ErrorResult& aRv,
+ MediaStreamGraph* aGraph = nullptr);
+
already_AddRefed<DOMMediaStream> MozCaptureStream(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);
already_AddRefed<DOMMediaStream> MozCaptureStreamUntilEnded(ErrorResult& aRv,
MediaStreamGraph* aGraph = nullptr);
bool MozAudioCaptured() const
{
@@ -782,16 +786,34 @@ protected:
void UpdateWakeLock();
bool mValue;
bool mCanPlay;
HTMLMediaElement* mOuter;
nsCOMPtr<nsITimer> mTimer;
};
+ // Holds references to the DOM wrappers for the MediaStreams that we're
+ // writing to.
+ struct OutputMediaStream {
+ OutputMediaStream();
+ ~OutputMediaStream();
+
+ RefPtr<DOMMediaStream> mStream;
+ bool mFinishWhenEnded;
+ bool mCapturingAudioOnly;
+ bool mCapturingDecoder;
+ bool mCapturingMediaStream;
+
+ // The following members are keeping state for a captured MediaStream.
+ TrackID mNextAvailableTrackID;
+ ProcessedMediaStream* mOutputStream;
+ nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
+ };
+
nsresult PlayInternal(bool aCallerIsChrome);
/** Use this method to change the mReadyState member, so required
* events can be fired.
*/
void ChangeReadyState(nsMediaReadyState aState);
/**
@@ -854,23 +876,45 @@ protected:
/**
* Called by our DOMMediaStream::TrackListener when a MediaStreamTrack in
* |mSrcStream|'s playback stream has ended.
*/
void NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack);
/**
- * Returns an nsDOMMediaStream containing the played contents of this
+ * Enables or disables all tracks forwarded from mSrcStream to all
+ * OutputMediaStreams. We do this for muting the tracks when pausing,
+ * and unmuting when playing the media element again.
+ *
+ * If mSrcStream is unset, this does nothing.
+ */
+ void SetCapturedOutputStreamsEnabled(bool aEnabled);
+
+ /**
+ * Create a new MediaStreamTrack for aTrack and add it to the DOMMediaStream
+ * in aOutputStream. This automatically sets the output track to enabled or
+ * disabled depending on our current playing state.
+ */
+ void AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
+ OutputMediaStream& aOutputStream);
+
+ /**
+ * Returns an DOMMediaStream containing the played contents of this
* element. When aFinishWhenEnded is true, when this element ends playback
* we will finish the stream and not play any more into it.
* When aFinishWhenEnded is false, ending playback does not finish the stream.
* The stream will never finish.
+ *
+ * When aCaptureAudio is true, we stop playout of audio and instead route it
+ * to the DOMMediaStream. Volume and mute state will be applied to the audio
+ * reaching the stream. No video tracks will be captured in this case.
*/
already_AddRefed<DOMMediaStream> CaptureStreamInternal(bool aFinishWhenEnded,
+ bool aCaptureAudio,
MediaStreamGraph* aGraph = nullptr);
/**
* Initialize a decoder as a clone of an existing decoder in another
* element.
* mLoadingSrc must already be set.
*/
nsresult InitializeDecoderAsClone(MediaDecoder* aOriginal);
@@ -1240,20 +1284,16 @@ protected:
// mSrcStream.
double mSrcStreamPausedCurrentTime;
// Holds a reference to the stream connecting this stream to the capture sink.
RefPtr<MediaInputPort> mCaptureStreamPort;
// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
- struct OutputMediaStream {
- RefPtr<DOMMediaStream> mStream;
- bool mFinishWhenEnded;
- };
nsTArray<OutputMediaStream> mOutputStreams;
// Holds a reference to the MediaStreamListener attached to mSrcStream's
// playback stream.
RefPtr<StreamListener> mMediaStreamListener;
// Holds a reference to the size-getting MediaStreamListener attached to
// mSrcStream.
RefPtr<StreamSizeListener> mMediaStreamSizeListener;