Bug 1299515 - Wire up track-disabling logic to frontend APIs. r?jib, r?johannh
This modifies mediaCaptureWindowState() to say whether a camera or microphone is
actively captured or not. Note that this is not the same as the device being
on or off. If we disallow a device from being off while disabled, we still
notify chrome that we're not actively capturing.
MozReview-Commit-ID: B1taormqc3j
--- a/browser/base/content/test/webrtc/get_user_media_content_script.js
+++ b/browser/base/content/test/webrtc/get_user_media_content_script.js
@@ -60,39 +60,40 @@ addMessageListener("Test:ExpectObserverC
});
addMessageListener("Test:ExpectNoObserverCalled", data => {
sendAsyncMessage("Test:ExpectNoObserverCalled:Reply", gObservedTopics);
gObservedTopics = {};
});
function _getMediaCaptureState() {
- let hasVideo = {};
- let hasAudio = {};
+ let hasCamera = {};
+ let hasMicrophone = {};
let hasScreenShare = {};
let hasWindowShare = {};
let hasAppShare = {};
let hasBrowserShare = {};
- MediaManagerService.mediaCaptureWindowState(content, hasVideo, hasAudio,
+ MediaManagerService.mediaCaptureWindowState(content,
+ hasCamera, hasMicrophone,
hasScreenShare, hasWindowShare,
hasAppShare, hasBrowserShare);
let result = {};
- if (hasVideo.value)
+ if (hasCamera.value != MediaManagerService.STATE_NOCAPTURE)
result.video = true;
- if (hasAudio.value)
+ if (hasMicrophone.value != MediaManagerService.STATE_NOCAPTURE)
result.audio = true;
- if (hasScreenShare.value)
+ if (hasScreenShare.value != MediaManagerService.STATE_NOCAPTURE)
result.screen = "Screen";
- else if (hasWindowShare.value)
+ else if (hasWindowShare.value != MediaManagerService.STATE_NOCAPTURE)
result.screen = "Window";
- else if (hasAppShare.value)
+ else if (hasAppShare.value != MediaManagerService.STATE_NOCAPTURE)
result.screen = "Application";
- else if (hasBrowserShare.value)
+ else if (hasBrowserShare.value != MediaManagerService.STATE_NOCAPTURE)
result.screen = "Browser";
return result;
}
addMessageListener("Test:GetMediaCaptureState", data => {
sendAsyncMessage("Test:MediaCaptureState", _getMediaCaptureState());
});
--- a/browser/modules/ContentWebRTC.jsm
+++ b/browser/modules/ContentWebRTC.jsm
@@ -310,19 +310,23 @@ function updateIndicators(aSubject, aTop
for (let contentWindow of contentWindows) {
if (contentWindow.document.documentURI == kBrowserURL) {
// There may be a preview shown at the same time as other streams.
continue;
}
let tabState = getTabStateForContentWindow(contentWindow);
- if (tabState.camera)
+ if (tabState.camera == MediaManagerService.STATE_CAPTURE_ENABLED)
+ state.showCameraIndicator = true;
+ if (tabState.camera == MediaManagerService.STATE_CAPTURE_DISABLED)
state.showCameraIndicator = true;
- if (tabState.microphone)
+ if (tabState.microphone == MediaManagerService.STATE_CAPTURE_ENABLED)
+ state.showMicrophoneIndicator = true;
+ if (tabState.microphone == MediaManagerService.STATE_CAPTURE_DISABLED)
state.showMicrophoneIndicator = true;
if (tabState.screen) {
if (tabState.screen == "Screen") {
state.showScreenSharingIndicator = "Screen";
} else if (tabState.screen == "Window") {
if (state.showScreenSharingIndicator != "Screen")
state.showScreenSharingIndicator = "Window";
} else if (tabState.screen == "Application") {
@@ -343,36 +347,39 @@ function updateIndicators(aSubject, aTop
function removeBrowserSpecificIndicator(aSubject, aTopic, aData) {
let contentWindow = Services.wm.getOuterWindowWithId(aData).top;
if (contentWindow.document.documentURI == kBrowserURL) {
// Ignore notifications caused by the browser UI showing previews.
return;
}
let tabState = getTabStateForContentWindow(contentWindow);
- if (!tabState.camera && !tabState.microphone && !tabState.screen)
+ if (tabState.camera == MediaManagerService.STATE_NOCAPTURE &&
+ tabState.microphone == MediaManagerService.STATE_NOCAPTURE &&
+ !tabState.screen)
tabState = {windowId: tabState.windowId};
let mm = getMessageManagerForWindow(contentWindow);
if (mm)
mm.sendAsyncMessage("webrtc:UpdateBrowserIndicators", tabState);
}
function getTabStateForContentWindow(aContentWindow) {
let camera = {}, microphone = {}, screen = {}, window = {}, app = {}, browser = {};
- MediaManagerService.mediaCaptureWindowState(aContentWindow, camera, microphone,
+ MediaManagerService.mediaCaptureWindowState(aContentWindow,
+ camera, microphone,
screen, window, app, browser);
let tabState = {camera: camera.value, microphone: microphone.value};
- if (screen.value)
+ if (screen.value != MediaManagerService.STATE_NOCAPTURE)
tabState.screen = "Screen";
- else if (window.value)
+ else if (window.value != MediaManagerService.STATE_NOCAPTURE)
tabState.screen = "Window";
- else if (app.value)
+ else if (app.value != MediaManagerService.STATE_NOCAPTURE)
tabState.screen = "Application";
- else if (browser.value)
+ else if (browser.value != MediaManagerService.STATE_NOCAPTURE)
tabState.screen = "Browser";
tabState.windowId = getInnerWindowIDForWindow(aContentWindow);
tabState.documentURI = aContentWindow.document.documentURI;
return tabState;
}
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -189,16 +189,48 @@ struct DeviceState {
// Any thread.
const RefPtr<MediaTimer> mDisableTimer;
// The underlying device we keep state for. Always non-null.
// Threadsafe access, but see method declarations for individual constraints.
const RefPtr<MediaDevice> mDevice;
};
+/**
+ * This mimics the capture state from nsIMediaManagerService.
+ */
+enum class CaptureState : uint16_t {
+ Off = nsIMediaManagerService::STATE_NOCAPTURE,
+ Enabled = nsIMediaManagerService::STATE_CAPTURE_ENABLED,
+ Disabled = nsIMediaManagerService::STATE_CAPTURE_DISABLED,
+};
+
+static CaptureState
+CombineCaptureState(CaptureState aFirst, CaptureState aSecond)
+{
+ if (aFirst == CaptureState::Enabled || aSecond == CaptureState::Enabled) {
+ return CaptureState::Enabled;
+ }
+ if (aFirst == CaptureState::Disabled || aSecond == CaptureState::Disabled) {
+ return CaptureState::Disabled;
+ }
+ MOZ_ASSERT(aFirst == CaptureState::Off);
+ MOZ_ASSERT(aSecond == CaptureState::Off);
+ return CaptureState::Off;
+}
+
+static uint16_t
+FromCaptureState(CaptureState aState)
+{
+ MOZ_ASSERT(aState == CaptureState::Off ||
+ aState == CaptureState::Enabled ||
+ aState == CaptureState::Disabled);
+ return static_cast<uint16_t>(aState);
+}
+
class SourceListener : public MediaStreamListener {
public:
SourceListener();
/**
* Registers this source listener as belonging to the given window listener.
*/
void Register(GetUserMediaWindowListener* aListener);
@@ -308,23 +340,17 @@ public:
{
return mStopped;
}
bool CapturingVideo() const;
bool CapturingAudio() const;
- bool CapturingScreen() const;
-
- bool CapturingWindow() const;
-
- bool CapturingApplication() const;
-
- bool CapturingBrowser() const;
+ CaptureState CapturingSource(MediaSourceEnum aSource) const;
already_AddRefed<PledgeVoid>
ApplyConstraintsToTrack(nsPIDOMWindowInner* aWindow,
TrackID aTrackID,
const dom::MediaTrackConstraints& aConstraints,
dom::CallerType aCallerType);
PrincipalHandle GetPrincipalHandle() const;
@@ -593,65 +619,36 @@ public:
MOZ_ASSERT(NS_IsMainThread());
for (auto& l : mActiveListeners) {
if (l->CapturingVideo()) {
return true;
}
}
return false;
}
+
bool CapturingAudio() const
{
MOZ_ASSERT(NS_IsMainThread());
for (auto& l : mActiveListeners) {
if (l->CapturingAudio()) {
return true;
}
}
return false;
}
- bool CapturingScreen() const
- {
- MOZ_ASSERT(NS_IsMainThread());
- for (auto& l : mActiveListeners) {
- if (l->CapturingScreen()) {
- return true;
- }
- }
- return false;
- }
- bool CapturingWindow() const
+
+ CaptureState CapturingSource(MediaSourceEnum aSource) const
{
MOZ_ASSERT(NS_IsMainThread());
- for (auto& l : mActiveListeners) {
- if (l->CapturingWindow()) {
- return true;
- }
- }
- return false;
- }
- bool CapturingApplication() const
- {
- MOZ_ASSERT(NS_IsMainThread());
+ CaptureState result = CaptureState::Off;
for (auto& l : mActiveListeners) {
- if (l->CapturingApplication()) {
- return true;
- }
+ result = CombineCaptureState(result, l->CapturingSource(aSource));
}
- return false;
- }
- bool CapturingBrowser() const
- {
- MOZ_ASSERT(NS_IsMainThread());
- for (auto& l : mActiveListeners) {
- if (l->CapturingBrowser()) {
- return true;
- }
- }
- return false;
+ return result;
}
uint64_t WindowID() const
{
return mWindowID;
}
PrincipalHandle GetPrincipalHandle() const { return mPrincipalHandle; }
@@ -1114,16 +1111,19 @@ public:
nsMainThreadPtrHandle<DOMMediaStream> domStream;
RefPtr<SourceMediaStream> stream;
// AudioCapture is a special case, here, in the sense that we're not really
// using the audio source and the SourceMediaStream, which acts as
// placeholders. We re-route a number of stream internaly in the MSG and mix
// them down instead.
if (mAudioDevice &&
mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
+ NS_WARNING("MediaCaptureWindowState doesn't handle "
+ "MediaSourceEnum::AudioCapture. This must be fixed with UX "
+ "before shipping.");
// It should be possible to pipe the capture stream to anything. CORS is
// not a problem here, we got explicit user content.
nsCOMPtr<nsIPrincipal> principal = window->GetExtantDoc()->NodePrincipal();
domStream = new nsMainThreadPtrHolder<DOMMediaStream>(
"GetUserMediaStreamRunnable::AudioCaptureDOMStreamMainThreadHolder",
DOMMediaStream::CreateAudioCaptureStreamAsInput(window, principal, msg));
stream = msg->CreateSourceStream(); // Placeholder
@@ -3473,100 +3473,105 @@ MediaManager::GetActiveMediaCaptureWindo
nsPIDOMWindowInner* window =
nsGlobalWindowInner::GetInnerWindowWithId(id)->AsInner();
MOZ_ASSERT(window);
// XXXkhuey ...
if (!window) {
continue;
}
- if (winListener->CapturingVideo() || winListener->CapturingAudio() ||
- winListener->CapturingScreen() || winListener->CapturingWindow() ||
- winListener->CapturingApplication()) {
+ if (winListener->CapturingVideo() || winListener->CapturingAudio()) {
array->AppendElement(window);
}
}
array.forget(aArray);
return NS_OK;
}
-// XXX flags might be better...
struct CaptureWindowStateData {
- bool *mVideo;
- bool *mAudio;
- bool *mScreenShare;
- bool *mWindowShare;
- bool *mAppShare;
- bool *mBrowserShare;
+ uint16_t* mCamera;
+ uint16_t* mMicrophone;
+ uint16_t* mScreenShare;
+ uint16_t* mWindowShare;
+ uint16_t* mAppShare;
+ uint16_t* mBrowserShare;
};
static void
CaptureWindowStateCallback(MediaManager *aThis,
uint64_t aWindowID,
- GetUserMediaWindowListener *aListener,
+ GetUserMediaWindowListener *aListener,
void *aData)
{
- struct CaptureWindowStateData *data = (struct CaptureWindowStateData *) aData;
+ MOZ_ASSERT(aData);
+
+ auto& data = *static_cast<CaptureWindowStateData*>(aData);
if (!aListener) {
return;
}
- if (aListener->CapturingVideo()) {
- *data->mVideo = true;
- }
- if (aListener->CapturingAudio()) {
- *data->mAudio = true;
- }
- if (aListener->CapturingScreen()) {
- *data->mScreenShare = true;
- }
- if (aListener->CapturingWindow()) {
- *data->mWindowShare = true;
- }
- if (aListener->CapturingApplication()) {
- *data->mAppShare = true;
- }
- if (aListener->CapturingBrowser()) {
- *data->mBrowserShare = true;
- }
+ *data.mCamera = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mCamera), aListener->CapturingSource(MediaSourceEnum::Camera)));
+ *data.mMicrophone = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mMicrophone), aListener->CapturingSource(MediaSourceEnum::Microphone)));
+ *data.mScreenShare = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mScreenShare), aListener->CapturingSource(MediaSourceEnum::Screen)));
+ *data.mWindowShare = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mWindowShare), aListener->CapturingSource(MediaSourceEnum::Window)));
+ *data.mAppShare = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mAppShare), aListener->CapturingSource(MediaSourceEnum::Application)));
+ *data.mBrowserShare = FromCaptureState(CombineCaptureState(
+ static_cast<CaptureState>(*data.mBrowserShare), aListener->CapturingSource(MediaSourceEnum::Browser)));
}
NS_IMETHODIMP
-MediaManager::MediaCaptureWindowState(nsIDOMWindow* aWindow, bool* aVideo,
- bool* aAudio, bool *aScreenShare,
- bool* aWindowShare, bool *aAppShare,
- bool *aBrowserShare)
+MediaManager::MediaCaptureWindowState(nsIDOMWindow* aWindow,
+ uint16_t* aCamera,
+ uint16_t* aMicrophone,
+ uint16_t* aScreenShare,
+ uint16_t* aWindowShare,
+ uint16_t* aAppShare,
+ uint16_t* aBrowserShare)
{
MOZ_ASSERT(NS_IsMainThread());
struct CaptureWindowStateData data;
- data.mVideo = aVideo;
- data.mAudio = aAudio;
+ data.mCamera = aCamera;
+ data.mMicrophone = aMicrophone;
data.mScreenShare = aScreenShare;
data.mWindowShare = aWindowShare;
data.mAppShare = aAppShare;
data.mBrowserShare = aBrowserShare;
- *aVideo = false;
- *aAudio = false;
- *aScreenShare = false;
- *aWindowShare = false;
- *aAppShare = false;
- *aBrowserShare = false;
+ *aCamera = nsIMediaManagerService::STATE_NOCAPTURE;
+ *aMicrophone = nsIMediaManagerService::STATE_NOCAPTURE;
+ *aScreenShare = nsIMediaManagerService::STATE_NOCAPTURE;
+ *aWindowShare = nsIMediaManagerService::STATE_NOCAPTURE;
+ *aAppShare = nsIMediaManagerService::STATE_NOCAPTURE;
+ *aBrowserShare = nsIMediaManagerService::STATE_NOCAPTURE;
nsCOMPtr<nsPIDOMWindowInner> piWin = do_QueryInterface(aWindow);
if (piWin) {
IterateWindowListeners(piWin, CaptureWindowStateCallback, &data);
}
#ifdef DEBUG
LOG(("%s: window %" PRIu64 " capturing %s %s %s %s %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1,
- *aVideo ? "video" : "", *aAudio ? "audio" : "",
- *aScreenShare ? "screenshare" : "", *aWindowShare ? "windowshare" : "",
- *aAppShare ? "appshare" : "", *aBrowserShare ? "browsershare" : ""));
+ *aCamera == nsIMediaManagerService::STATE_CAPTURE_ENABLED
+ ? "camera (enabled)"
+ : (*aCamera == nsIMediaManagerService::STATE_CAPTURE_DISABLED
+ ? "camera (disabled)" : ""),
+ *aMicrophone == nsIMediaManagerService::STATE_CAPTURE_ENABLED
+ ? "microphone (enabled)"
+ : (*aMicrophone == nsIMediaManagerService::STATE_CAPTURE_DISABLED
+ ? "microphone (disabled)" : ""),
+ *aScreenShare ? "screenshare" : "",
+ *aWindowShare ? "windowshare" : "",
+ *aAppShare ? "appshare" : "",
+ *aBrowserShare ? "browsershare" : ""));
#endif
return NS_OK;
}
NS_IMETHODIMP
MediaManager::SanitizeDeviceIds(int64_t aSinceWhen)
{
MOZ_ASSERT(NS_IsMainThread());
@@ -4147,68 +4152,65 @@ SourceListener::NotifyRemoved()
mWindowListener = nullptr;
}
bool
SourceListener::CapturingVideo() const
{
MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mVideoDeviceState &&
- !mVideoDeviceState->mStopped &&
- mVideoDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Camera &&
+ return Activated() && mVideoDeviceState && !mVideoDeviceState->mStopped &&
(!mVideoDeviceState->mDevice->mSource->IsFake() ||
Preferences::GetBool("media.navigator.permission.fake"));
}
bool
SourceListener::CapturingAudio() const
{
MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mAudioDeviceState &&
- !mAudioDeviceState->mStopped &&
- mAudioDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Microphone &&
+ return Activated() && mAudioDeviceState && !mAudioDeviceState->mStopped &&
(!mAudioDeviceState->mDevice->mSource->IsFake() ||
Preferences::GetBool("media.navigator.permission.fake"));
}
-bool
-SourceListener::CapturingScreen() const
-{
- MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mVideoDeviceState &&
- !mVideoDeviceState->mStopped &&
- mVideoDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Screen;
-}
-
-bool
-SourceListener::CapturingWindow() const
+CaptureState
+SourceListener::CapturingSource(MediaSourceEnum aSource) const
{
MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mVideoDeviceState &&
- !mVideoDeviceState->mStopped &&
- mVideoDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Window;
-}
-
-bool
-SourceListener::CapturingApplication() const
-{
- MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mVideoDeviceState &&
- !mVideoDeviceState->mStopped &&
- mVideoDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Application;
-}
-
-bool
-SourceListener::CapturingBrowser() const
-{
- MOZ_ASSERT(NS_IsMainThread());
- return Activated() && mVideoDeviceState &&
- !mVideoDeviceState->mStopped &&
- mVideoDeviceState->mDevice->GetMediaSource() == dom::MediaSourceEnum::Browser;
+ if ((!GetVideoDevice() || GetVideoDevice()->GetMediaSource() != aSource) &&
+ (!GetAudioDevice() || GetAudioDevice()->GetMediaSource() != aSource)) {
+ // This SourceListener doesn't capture a matching source
+ return CaptureState::Off;
+ }
+
+ DeviceState& state =
+ (GetAudioDevice() && GetAudioDevice()->GetMediaSource() == aSource)
+ ? *mAudioDeviceState : *mVideoDeviceState;
+ MOZ_ASSERT(state.mDevice->GetMediaSource() == aSource);
+
+ if (state.mStopped) {
+ // The source is a match but has been permanently stopped
+ return CaptureState::Off;
+ }
+
+ if ((aSource == MediaSourceEnum::Camera ||
+ aSource == MediaSourceEnum::Microphone) &&
+ state.mDevice->mSource->IsFake() &&
+ !Preferences::GetBool("media.navigator.permission.fake")) {
+ // Fake Camera and Microphone only count if there is no fake permission
+ return CaptureState::Off;
+ }
+
+ // Source is a match and is active
+
+ if (state.mDeviceEnabled) {
+ return CaptureState::Enabled;
+ }
+
+ return CaptureState::Disabled;
}
already_AddRefed<PledgeVoid>
SourceListener::ApplyConstraintsToTrack(
nsPIDOMWindowInner* aWindow,
TrackID aTrackID,
const MediaTrackConstraints& aConstraintsPassedIn,
dom::CallerType aCallerType)
--- a/dom/media/nsIMediaManager.idl
+++ b/dom/media/nsIMediaManager.idl
@@ -13,17 +13,26 @@ interface nsIDOMWindow;
%}
[scriptable, builtinclass, uuid(24b23e01-33fd-401f-ba25-6e52658750b0)]
interface nsIMediaManagerService : nsISupports
{
/* return a array of inner windows that have active captures */
readonly attribute nsIArray activeMediaCaptureWindows;
+ /* possible states for camera and microphone capture */
+ const unsigned short STATE_NOCAPTURE = 0;
+ const unsigned short STATE_CAPTURE_ENABLED = 1;
+ const unsigned short STATE_CAPTURE_DISABLED = 2;
+
/* Get the capture state for the given window and all descendant windows (iframes, etc) */
- void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio,
- [optional] out boolean aScreenShare, [optional] out boolean aWindowShare,
- [optional] out boolean aAppShare, [optional] out boolean aBrowserShare);
+ void mediaCaptureWindowState(in nsIDOMWindow aWindow,
+ out unsigned short aCamera,
+ out unsigned short aMicrophone,
+ [optional] out unsigned short aScreenShare,
+ [optional] out unsigned short aWindowShare,
+ [optional] out unsigned short aAppShare,
+ [optional] out unsigned short aBrowserShare);
/* Clear per-orgin list of persistent DeviceIds stored for enumerateDevices
sinceTime is milliseconds since 1 January 1970 00:00:00 UTC. 0 = clear all */
void sanitizeDeviceIds(in long long sinceWhen);
};
--- a/mobile/android/modules/WebrtcUI.jsm
+++ b/mobile/android/modules/WebrtcUI.jsm
@@ -100,21 +100,21 @@ var WebrtcUI = {
light: [0xFF9500FF, 1000, 1000],
ongoing: true
};
let cameraActive = false;
let audioActive = false;
for (let i = 0; i < count; i++) {
let win = windows.queryElementAt(i, Ci.nsIDOMWindow);
- let hasAudio = {};
- let hasVideo = {};
- MediaManagerService.mediaCaptureWindowState(win, hasVideo, hasAudio);
- if (hasVideo.value) cameraActive = true;
- if (hasAudio.value) audioActive = true;
+ let hasCamera = {};
+ let hasMicrophone = {};
+ MediaManagerService.mediaCaptureWindowState(win, hasCamera, hasMicrophone);
+ if (hasCamera.value != MediaManagerService.STATE_NOCAPTURE) cameraActive = true;
+ if (hasMicrophone.value != MediaManagerService.STATE_NOCAPTURE) audioActive = true;
}
if (cameraActive && audioActive) {
notificationOptions.message = Strings.browser.GetStringFromName("getUserMedia.sharingCameraAndMicrophone.message2");
notificationOptions.icon = "drawable:alert_mic_camera";
} else if (cameraActive) {
notificationOptions.message = Strings.browser.GetStringFromName("getUserMedia.sharingCamera.message2");
notificationOptions.icon = "drawable:alert_camera";