Bug 1299515 - Flatten MediaEngineSource class hierarchy. r?jib draft
authorAndreas Pehrson <pehrsons@mozilla.com>
Wed, 24 Jan 2018 16:49:13 +0100
changeset 749421 523a65381727c88381e83b9b319fc8377b4353e8
parent 748938 2afb74befd9a0313e34aa66e0599e5b15ae4bd57
child 749422 ca0129a4d9a50e55d4073b717463831a7ecbb850
push id97396
push userbmo:apehrson@mozilla.com
push dateWed, 31 Jan 2018 13:27:39 +0000
reviewersjib
bugs1299515
milestone60.0a1
Bug 1299515 - Flatten MediaEngineSource class hierarchy. r?jib The scope of flattening this hierarchy quickly grows large, so this patch does a couple more things: - Creates a pure interface MediaEngineSourceInterface and a base class MediaEngineSource with common defaults and refcount support (no state!) - Breaks out some of the helper classes to dedicated files, e.g., AllocationHandle, MediaEnginePrefs. - Clarifies the threading model (written on one thread *and* under lock, read under either) - Fixes style, indentation, include-sorting in the affected files - Adds comments, especially for clarifying what responsibilities methods have, and thread usage of class members - Changes Monitors to Mutexes since we only use them as Mutexes anyhow - Makes MediaEngineRemoteVideoSource no longer a shared source since we now support scaling in this source and CamerasChild can act as a broker of frames. This greatly simplifies it. The only shared source is now MediaEngineWebRTCMicrophoneSource, so the sharing specific common methods have been moved to that source. MozReview-Commit-ID: KeVZQo6gLm2
dom/media/GetUserMediaRequest.cpp
dom/media/GetUserMediaRequest.h
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/imagecapture/ImageCapture.cpp
dom/media/systemservices/CamerasChild.cpp
dom/media/systemservices/CamerasChild.h
dom/media/systemservices/CamerasParent.cpp
dom/media/systemservices/PCameras.ipdl
dom/media/webrtc/AllocationHandle.h
dom/media/webrtc/MediaEngine.h
dom/media/webrtc/MediaEngineCameraVideoSource.cpp
dom/media/webrtc/MediaEngineCameraVideoSource.h
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEnginePrefs.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineSource.cpp
dom/media/webrtc/MediaEngineSource.h
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.h
dom/media/webrtc/MediaEngineWebRTC.cpp
dom/media/webrtc/MediaEngineWebRTC.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaTrackConstraints.cpp
dom/media/webrtc/MediaTrackConstraints.h
dom/media/webrtc/moz.build
dom/media/webspeech/recognition/SpeechRecognition.cpp
--- a/dom/media/GetUserMediaRequest.cpp
+++ b/dom/media/GetUserMediaRequest.cpp
@@ -1,14 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "GetUserMediaRequest.h"
+
 #include "base/basictypes.h"
-#include "GetUserMediaRequest.h"
 #include "mozilla/dom/MediaStreamBinding.h"
 #include "mozilla/dom/GetUserMediaRequestBinding.h"
 #include "nsIScriptGlobalObject.h"
 #include "nsPIDOMWindow.h"
 
 namespace mozilla {
 namespace dom {
 
--- a/dom/media/GetUserMediaRequest.h
+++ b/dom/media/GetUserMediaRequest.h
@@ -40,17 +40,17 @@ public:
   bool IsSecure();
   bool IsHandlingUserInput() const;
   void GetCallID(nsString& retval);
   void GetRawID(nsString& retval);
   void GetMediaSource(nsString& retval);
   void GetConstraints(MediaStreamConstraints &result);
 
 private:
-  virtual ~GetUserMediaRequest() {}
+  virtual ~GetUserMediaRequest() = default;
 
   uint64_t mInnerWindowID, mOuterWindowID;
   const nsString mCallID;
   const nsString mRawID;
   const nsString mMediaSource;
   nsAutoPtr<MediaStreamConstraints> mConstraints;
   bool mIsSecure;
   bool mIsHandlingUserInput;
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1,16 +1,17 @@
 /* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
 /* vim: set ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaManager.h"
 
+#include "AllocationHandle.h"
 #include "MediaStreamGraph.h"
 #include "mozilla/dom/MediaStreamTrack.h"
 #include "MediaStreamListener.h"
 #include "nsArray.h"
 #include "nsContentUtils.h"
 #include "nsGlobalWindow.h"
 #include "nsHashPropertyBag.h"
 #include "nsIEventTarget.h"
@@ -83,25 +84,20 @@
 // GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
 // GetTickCount() and conflicts with MediaStream::GetCurrentTime.
 #ifdef GetCurrentTime
 #undef GetCurrentTime
 #endif
 
 // XXX Workaround for bug 986974 to maintain the existing broken semantics
 template<>
-struct nsIMediaDevice::COMTypeInfo<mozilla::VideoDevice, void> {
+struct nsIMediaDevice::COMTypeInfo<mozilla::MediaDevice, void> {
   static const nsIID kIID;
 };
-const nsIID nsIMediaDevice::COMTypeInfo<mozilla::VideoDevice, void>::kIID = NS_IMEDIADEVICE_IID;
-template<>
-struct nsIMediaDevice::COMTypeInfo<mozilla::AudioDevice, void> {
-  static const nsIID kIID;
-};
-const nsIID nsIMediaDevice::COMTypeInfo<mozilla::AudioDevice, void>::kIID = NS_IMEDIADEVICE_IID;
+const nsIID nsIMediaDevice::COMTypeInfo<mozilla::MediaDevice, void>::kIID = NS_IMEDIADEVICE_IID;
 
 namespace {
 already_AddRefed<nsIAsyncShutdownClient> GetShutdownPhase() {
   nsCOMPtr<nsIAsyncShutdownService> svc = mozilla::services::GetAsyncShutdown();
   MOZ_RELEASE_ASSERT(svc);
 
   nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase;
   nsresult rv = svc->GetProfileBeforeChange(getter_AddRefs(shutdownPhase));
@@ -155,26 +151,31 @@ static Atomic<bool> sHasShutdown;
 
 typedef media::Pledge<bool, dom::MediaStreamError*> PledgeVoid;
 
 class SourceListener : public MediaStreamListener {
 public:
   SourceListener();
 
   /**
+   * Returns the current device for the given track.
+   */
+  MediaDevice* GetDevice(TrackID aTrackID) const;
+
+  /**
    * Registers this source listener as belonging to the given window listener.
    */
   void Register(GetUserMediaWindowListener* aListener);
 
   /**
    * Marks this listener as active and adds itself as a listener to aStream.
    */
   void Activate(SourceMediaStream* aStream,
-                AudioDevice* aAudioDevice,
-                VideoDevice* aVideoDevice);
+                MediaDevice* aAudioDevice,
+                MediaDevice* aVideoDevice);
 
   /**
    * Stops all live tracks, finishes the associated MediaStream and cleans up.
    */
   void Stop();
 
   /**
    * Removes this SourceListener from its associated MediaStream and marks it
@@ -210,39 +211,41 @@ public:
 
   MediaStream* Stream() const
   {
     return mStream;
   }
 
   SourceMediaStream* GetSourceStream();
 
-  AudioDevice* GetAudioDevice() const
+  MediaDevice* GetAudioDevice() const
   {
     return mAudioDevice;
   }
 
-  VideoDevice* GetVideoDevice() const
+  MediaDevice* GetVideoDevice() const
   {
     return mVideoDevice;
   }
 
-  void GetSettings(dom::MediaTrackSettings& aOutSettings, TrackID aTrackID);
-
   void NotifyPull(MediaStreamGraph* aGraph,
                   StreamTime aDesiredTime) override;
 
   void NotifyEvent(MediaStreamGraph* aGraph,
                    MediaStreamGraphEvent aEvent) override;
 
+  /**
+   * Called on main thread after MediaStreamGraph notifies us that our
+   * MediaStream was marked as finished in the graph.
+   */
   void NotifyFinished();
 
   /**
-   * this can be in response to our own RemoveListener() (via ::Remove()), or
-   * because the DOM GC'd the DOMLocalMediaStream/etc we're attached to.
+   * Called on main thread after MediaStreamGraph notifies us that we
+   * were removed as a listener from the MediaStream in the graph.
    */
   void NotifyRemoved();
 
   bool Activated() const
   {
     return mStream;
   }
 
@@ -297,18 +300,18 @@ private:
 
   // Weak pointer to the window listener that owns us. MainThread only.
   GetUserMediaWindowListener* mWindowListener;
 
   // Set at Activate on MainThread
 
   // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're only addrefed except on the MediaManager thread
-  RefPtr<AudioDevice> mAudioDevice; // threadsafe refcnt
-  RefPtr<VideoDevice> mVideoDevice; // threadsafe refcnt
+  RefPtr<MediaDevice> mAudioDevice; // threadsafe refcnt
+  RefPtr<MediaDevice> mVideoDevice; // threadsafe refcnt
   RefPtr<SourceMediaStream> mStream; // threadsafe refcnt
 };
 
 /**
  * This class represents a WindowID and handles all MediaStreamListeners
  * (here subclassed as SourceListeners) used to feed GetUserMedia source
  * streams. It proxies feedback from them into messages for browser chrome.
  * The SourceListeners are used to Start() and Stop() the underlying
@@ -354,18 +357,18 @@ public:
   }
 
   /**
    * Activates an already registered and inactive gUM source listener for this
    * WindowListener.
    */
   void Activate(SourceListener* aListener,
                 SourceMediaStream* aStream,
-                AudioDevice* aAudioDevice,
-                VideoDevice* aVideoDevice)
+                MediaDevice* aAudioDevice,
+                MediaDevice* aVideoDevice)
   {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (!aListener || aListener->Activated()) {
       MOZ_ASSERT(false, "Cannot activate already activated source listener");
       return;
     }
 
@@ -453,24 +456,24 @@ public:
                "mInactiveListeners and mActiveListeners");
     MOZ_ASSERT(!mActiveListeners.Contains(aListener),
                "A SourceListener should only be once in one of "
                "mInactiveListeners and mActiveListeners");
 
     LOG(("GUMWindowListener %p removing SourceListener %p.", this, aListener));
     aListener->Remove();
 
-    if (VideoDevice* removedDevice = aListener->GetVideoDevice()) {
+    if (MediaDevice* removedDevice = aListener->GetVideoDevice()) {
       bool revokeVideoPermission = true;
       nsString removedRawId;
       nsString removedSourceType;
       removedDevice->GetRawId(removedRawId);
       removedDevice->GetMediaSource(removedSourceType);
       for (const auto& l : mActiveListeners) {
-        if (VideoDevice* device = l->GetVideoDevice()) {
+        if (MediaDevice* device = l->GetVideoDevice()) {
           nsString rawId;
           device->GetRawId(rawId);
           if (removedRawId.Equals(rawId)) {
             revokeVideoPermission = false;
             break;
           }
         }
       }
@@ -481,24 +484,24 @@ public:
         nsPIDOMWindowInner* window = globalWindow ? globalWindow->AsInner()
                                                   : nullptr;
         RefPtr<GetUserMediaRequest> req =
           new GetUserMediaRequest(window, removedRawId, removedSourceType);
         obs->NotifyObservers(req, "recording-device-stopped", nullptr);
       }
     }
 
-    if (AudioDevice* removedDevice = aListener->GetAudioDevice()) {
+    if (MediaDevice* removedDevice = aListener->GetAudioDevice()) {
       bool revokeAudioPermission = true;
       nsString removedRawId;
       nsString removedSourceType;
       removedDevice->GetRawId(removedRawId);
       removedDevice->GetMediaSource(removedSourceType);
       for (const auto& l : mActiveListeners) {
-        if (AudioDevice* device = l->GetAudioDevice()) {
+        if (MediaDevice* device = l->GetAudioDevice()) {
           nsString rawId;
           device->GetRawId(rawId);
           if (removedRawId.Equals(rawId)) {
             revokeAudioPermission = false;
             break;
           }
         }
       }
@@ -678,32 +681,30 @@ private:
   RefPtr<MediaManager> mManager; // get ref to this when creating the runnable
 };
 
 /**
  * nsIMediaDevice implementation.
  */
 NS_IMPL_ISUPPORTS(MediaDevice, nsIMediaDevice)
 
-MediaDevice::MediaDevice(MediaEngineSource* aSource, bool aIsVideo)
-  : mScary(aSource->GetScary())
-  , mMediaSource(aSource->GetMediaSource())
-  , mSource(aSource)
-  , mIsVideo(aIsVideo)
+MediaDevice::MediaDevice(MediaEngineSource* aSource,
+                         const nsString& aName,
+                         const nsString& aID,
+                         const nsString& aRawID)
+  : mSource(aSource)
+  , mIsVideo(MediaEngineSource::IsVideo(mSource->GetMediaSource()))
+  , mScary(mSource->GetScary())
+  , mType(mIsVideo ? NS_LITERAL_STRING("video") : NS_LITERAL_STRING("audio"))
+  , mName(aName)
+  , mID(aID)
+  , mRawID(aRawID)
 {
-  mSource->GetName(mName);
-  nsCString id;
-  mSource->GetUUID(id);
-  CopyUTF8toUTF16(id, mID);
 }
 
-VideoDevice::VideoDevice(MediaEngineVideoSource* aSource)
-  : MediaDevice(aSource, true)
-{}
-
 /**
  * Helper functions that implement the constraints algorithm from
  * http://dev.w3.org/2011/webrtc/editor/getusermedia.html#methods-5
  */
 
 bool
 MediaDevice::StringsContain(const OwningStringOrStringSequence& aStrings,
                             nsString aN)
@@ -762,55 +763,31 @@ MediaDevice::GetBestFitnessDistance(
       if (constraint->mMediaSource.mIdeal.find(mediaSource) ==
           constraint->mMediaSource.mIdeal.end()) {
         return UINT32_MAX;
       }
     }
   }
   // Forward request to underlying object to interrogate per-mode capabilities.
   // Pass in device's origin-specific id for deviceId constraint comparison.
-  nsString id;
-  if (aIsChrome) {
-    GetRawId(id);
-  } else {
-    GetId(id);
-  }
+  const nsString& id = aIsChrome ? mRawID : mID;
   return mSource->GetBestFitnessDistance(aConstraintSets, id);
 }
 
-AudioDevice::AudioDevice(MediaEngineAudioSource* aSource)
-  : MediaDevice(aSource, false)
-{
-  mMediaSource = aSource->GetMediaSource();
-}
-
 NS_IMETHODIMP
 MediaDevice::GetName(nsAString& aName)
 {
   aName.Assign(mName);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaDevice::GetType(nsAString& aType)
 {
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-VideoDevice::GetType(nsAString& aType)
-{
-  aType.AssignLiteral(u"video");
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-AudioDevice::GetType(nsAString& aType)
-{
-  aType.AssignLiteral(u"audio");
+  aType.Assign(mType);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaDevice::GetId(nsAString& aID)
 {
   aID.Assign(mID);
   return NS_OK;
@@ -826,73 +803,92 @@ MediaDevice::GetRawId(nsAString& aID)
 NS_IMETHODIMP
 MediaDevice::GetScary(bool* aScary)
 {
   *aScary = mScary;
   return NS_OK;
 }
 
 void
-MediaDevice::SetId(const nsAString& aID)
+MediaDevice::GetSettings(dom::MediaTrackSettings& aOutSettings) const
 {
-  mID.Assign(aID);
-}
-
-void
-MediaDevice::SetRawId(const nsAString& aID)
-{
-  mRawID.Assign(aID);
+  mSource->GetSettings(aOutSettings);
 }
 
 NS_IMETHODIMP
 MediaDevice::GetMediaSource(nsAString& aMediaSource)
 {
-  if (mMediaSource == MediaSourceEnum::Microphone) {
+  MediaSourceEnum source = GetMediaSource();
+  if (source == MediaSourceEnum::Microphone) {
     aMediaSource.AssignLiteral(u"microphone");
-  } else if (mMediaSource == MediaSourceEnum::AudioCapture) {
+  } else if (source == MediaSourceEnum::AudioCapture) {
     aMediaSource.AssignLiteral(u"audioCapture");
-  } else if (mMediaSource == MediaSourceEnum::Window) { // this will go away
+  } else if (source == MediaSourceEnum::Window) { // this will go away
     aMediaSource.AssignLiteral(u"window");
   } else { // all the rest are shared
     aMediaSource.Assign(NS_ConvertUTF8toUTF16(
-      dom::MediaSourceEnumValues::strings[uint32_t(mMediaSource)].value));
+      dom::MediaSourceEnumValues::strings[uint32_t(source)].value));
   }
   return NS_OK;
 }
 
-VideoDevice::Source*
-VideoDevice::GetSource()
-{
-  return static_cast<Source*>(&*mSource);
-}
-
-AudioDevice::Source*
-AudioDevice::GetSource()
-{
-  return static_cast<Source*>(&*mSource);
-}
-
 nsresult MediaDevice::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                const MediaEnginePrefs &aPrefs,
                                const ipc::PrincipalInfo& aPrincipalInfo,
-                               const char** aOutBadConstraint) {
-  return GetSource()->Allocate(aConstraints, aPrefs, mID, aPrincipalInfo,
-                               getter_AddRefs(mAllocationHandle),
-                               aOutBadConstraint);
+                               const char** aOutBadConstraint)
+{
+  return mSource->Allocate(aConstraints,
+                           aPrefs,
+                           mID,
+                           aPrincipalInfo,
+                           getter_AddRefs(mAllocationHandle),
+                           aOutBadConstraint);
 }
 
-nsresult MediaDevice::Restart(const dom::MediaTrackConstraints &aConstraints,
+nsresult MediaDevice::Start(SourceMediaStream* aStream,
+                            TrackID aTrackID,
+                            const PrincipalHandle& aPrincipal)
+
+{
+  return mSource->Start(aStream, aTrackID, aPrincipal);
+}
+
+nsresult MediaDevice::Reconfigure(const dom::MediaTrackConstraints &aConstraints,
                               const MediaEnginePrefs &aPrefs,
-                              const char** aOutBadConstraint) {
-  return GetSource()->Restart(mAllocationHandle, aConstraints, aPrefs, mID,
+                              const char** aOutBadConstraint)
+{
+  return mSource->Reconfigure(mAllocationHandle,
+                              aConstraints,
+                              aPrefs,
+                              mID,
                               aOutBadConstraint);
 }
 
-nsresult MediaDevice::Deallocate() {
-  return GetSource()->Deallocate(mAllocationHandle);
+nsresult MediaDevice::Stop(SourceMediaStream* aStream, TrackID aTrackID)
+{
+  return mSource->Stop(aStream, aTrackID);
+}
+
+nsresult MediaDevice::Deallocate()
+{
+  return mSource->Deallocate(mAllocationHandle);
+}
+
+void MediaDevice::Pull(const RefPtr<SourceMediaStream>& aStream,
+                       TrackID aTrackID,
+                       StreamTime aDesiredTime,
+                       const PrincipalHandle& aPrincipal)
+{
+  mSource->Pull(mAllocationHandle, aStream, aTrackID, aDesiredTime, aPrincipal);
+}
+
+dom::MediaSourceEnum
+MediaDevice::GetMediaSource() const
+{
+  return mSource->GetMediaSource();
 }
 
 static bool
 IsOn(const OwningBooleanOrMediaTrackConstraints &aUnion) {
   return !aUnion.IsBoolean() || aUnion.GetAsBoolean();
 }
 
 static const MediaTrackConstraints&
@@ -963,18 +959,18 @@ public:
   GetUserMediaStreamRunnable(
     const nsMainThreadPtrHandle<nsIDOMGetUserMediaSuccessCallback>& aOnSuccess,
     const nsMainThreadPtrHandle<nsIDOMGetUserMediaErrorCallback>& aOnFailure,
     uint64_t aWindowID,
     GetUserMediaWindowListener* aWindowListener,
     SourceListener* aSourceListener,
     const ipc::PrincipalInfo& aPrincipalInfo,
     const MediaStreamConstraints& aConstraints,
-    AudioDevice* aAudioDevice,
-    VideoDevice* aVideoDevice,
+    MediaDevice* aAudioDevice,
+    MediaDevice* aVideoDevice,
     PeerIdentity* aPeerIdentity)
     : Runnable("GetUserMediaStreamRunnable")
     , mOnSuccess(aOnSuccess)
     , mOnFailure(aOnFailure)
     , mConstraints(aConstraints)
     , mAudioDevice(aAudioDevice)
     , mVideoDevice(aVideoDevice)
     , mWindowID(aWindowID)
@@ -1107,17 +1103,17 @@ public:
           return mListener->ApplyConstraintsToTrack(aWindow, mTrackID,
                                                     aConstraints, aCallerType);
         }
 
         void
         GetSettings(dom::MediaTrackSettings& aOutSettings) override
         {
           if (mListener) {
-            mListener->GetSettings(aOutSettings, mTrackID);
+            mListener->GetDevice(mTrackID)->GetSettings(aOutSettings);
           }
         }
 
         void Stop() override
         {
           if (mListener) {
             mListener->StopTrack(mTrackID);
             mListener = nullptr;
@@ -1162,31 +1158,31 @@ public:
         DOMLocalMediaStream::CreateSourceStreamAsInput(window, msg,
                                                        new FakeTrackSourceGetter(principal)));
       stream = domStream->GetInputStream()->AsSourceStream();
 
       if (mAudioDevice) {
         nsString audioDeviceName;
         mAudioDevice->GetName(audioDeviceName);
         const MediaSourceEnum source =
-          mAudioDevice->GetSource()->GetMediaSource();
+          mAudioDevice->GetMediaSource();
         RefPtr<MediaStreamTrackSource> audioSource =
           new LocalTrackSource(principal, audioDeviceName, mSourceListener,
                                source, kAudioTrack, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mAudio));
         RefPtr<MediaStreamTrack> track =
           domStream->CreateDOMTrack(kAudioTrack, MediaSegment::AUDIO, audioSource,
                                     GetInvariant(mConstraints.mAudio));
         domStream->AddTrackInternal(track);
       }
       if (mVideoDevice) {
         nsString videoDeviceName;
         mVideoDevice->GetName(videoDeviceName);
         const MediaSourceEnum source =
-          mVideoDevice->GetSource()->GetMediaSource();
+          mVideoDevice->GetMediaSource();
         RefPtr<MediaStreamTrackSource> videoSource =
           new LocalTrackSource(principal, videoDeviceName, mSourceListener,
                                source, kVideoTrack, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mVideo));
         RefPtr<MediaStreamTrack> track =
           domStream->CreateDOMTrack(kVideoTrack, MediaSegment::VIDEO, videoSource,
                                     GetInvariant(mConstraints.mVideo));
         domStream->AddTrackInternal(track);
@@ -1228,35 +1224,35 @@ public:
     // notification lambda to ensure it's kept alive until that lambda runs or is discarded.
     RefPtr<GetUserMediaStreamRunnable> self = this;
     MediaManager::PostTask(NewTaskFrom([self, domStream, callback]() mutable {
       MOZ_ASSERT(MediaManager::IsInMediaThread());
       SourceMediaStream* source = self->mSourceListener->GetSourceStream();
 
       RefPtr<MediaMgrError> error = nullptr;
       if (self->mAudioDevice) {
-        nsresult rv =
-          self->mAudioDevice->GetSource()->Start(source, kAudioTrack,
-                                                 self->mSourceListener->GetPrincipalHandle());
+        nsresult rv = self->mAudioDevice->Start(source,
+                                                kAudioTrack,
+                                                self->mSourceListener->GetPrincipalHandle());
         if (NS_FAILED(rv)) {
           nsString log;
           if (rv == NS_ERROR_NOT_AVAILABLE) {
             log.AssignASCII("Concurrent mic process limit.");
             error = new MediaMgrError(NS_LITERAL_STRING("NotReadableError"), log);
           } else {
             log.AssignASCII("Starting audio failed");
             error = new MediaMgrError(NS_LITERAL_STRING("InternalError"), log);
           }
         }
       }
 
       if (!error && self->mVideoDevice) {
-        nsresult rv =
-          self->mVideoDevice->GetSource()->Start(source, kVideoTrack,
-                                                 self->mSourceListener->GetPrincipalHandle());
+        nsresult rv = self->mVideoDevice->Start(source,
+                                                kVideoTrack,
+                                                self->mSourceListener->GetPrincipalHandle());
         if (NS_FAILED(rv)) {
           nsString log;
           log.AssignASCII("Starting video failed");
           error = new MediaMgrError(NS_LITERAL_STRING("InternalError"), log);
         }
       }
 
       if (error) {
@@ -1311,57 +1307,59 @@ public:
     }
     return NS_OK;
   }
 
 private:
   nsMainThreadPtrHandle<nsIDOMGetUserMediaSuccessCallback> mOnSuccess;
   nsMainThreadPtrHandle<nsIDOMGetUserMediaErrorCallback> mOnFailure;
   MediaStreamConstraints mConstraints;
-  RefPtr<AudioDevice> mAudioDevice;
-  RefPtr<VideoDevice> mVideoDevice;
+  RefPtr<MediaDevice> mAudioDevice;
+  RefPtr<MediaDevice> mVideoDevice;
   uint64_t mWindowID;
   RefPtr<GetUserMediaWindowListener> mWindowListener;
   RefPtr<SourceListener> mSourceListener;
   ipc::PrincipalInfo mPrincipalInfo;
   RefPtr<PeerIdentity> mPeerIdentity;
   RefPtr<MediaManager> mManager; // get ref to this when creating the runnable
 };
 
 // Source getter returning full list
 
-template<class DeviceType>
 static void
 GetSources(MediaEngine *engine, MediaSourceEnum aSrcType,
-           void (MediaEngine::* aEnumerate)(MediaSourceEnum,
-               nsTArray<RefPtr<typename DeviceType::Source> >*),
-           nsTArray<RefPtr<DeviceType>>& aResult,
+           nsTArray<RefPtr<MediaDevice>>& aResult,
            const char* media_device_name = nullptr)
 {
-  nsTArray<RefPtr<typename DeviceType::Source>> sources;
-
-  (engine->*aEnumerate)(aSrcType, &sources);
-  /**
-    * We're allowing multiple tabs to access the same camera for parity
-    * with Chrome.  See bug 811757 for some of the issues surrounding
-    * this decision.  To disallow, we'd filter by IsAvailable() as we used
-    * to.
-    */
+  nsTArray<RefPtr<MediaEngineSource>> sources;
+  engine->EnumerateDevices(aSrcType, &sources);
+
+  /*
+   * We're allowing multiple tabs to access the same camera for parity
+   * with Chrome.  See bug 811757 for some of the issues surrounding
+   * this decision.  To disallow, we'd filter by IsAvailable() as we used
+   * to.
+   */
   if (media_device_name && *media_device_name)  {
     for (auto& source : sources) {
-      nsString deviceName;
-      source->GetName(deviceName);
+      nsString deviceName = source->GetName();
       if (deviceName.EqualsASCII(media_device_name)) {
-        aResult.AppendElement(new DeviceType(source));
+        aResult.AppendElement(MakeRefPtr<MediaDevice>(
+              source,
+              source->GetName(),
+              NS_ConvertUTF8toUTF16(source->GetUUID())));
         break;
       }
     }
   } else {
     for (auto& source : sources) {
-      aResult.AppendElement(new DeviceType(source));
+      aResult.AppendElement(MakeRefPtr<MediaDevice>(
+            source,
+            source->GetName(),
+            NS_ConvertUTF8toUTF16(source->GetUUID())));
     }
   }
 }
 
 // TODO: Remove once upgraded to GCC 4.8+ on linux. Bogus error on static func:
 // error: 'this' was not captured for this lambda function
 
 static auto& MediaManager_ToJSArray = MediaManager::ToJSArray;
@@ -1383,26 +1381,24 @@ MediaManager::SelectSettings(
   MediaManager::PostTask(NewTaskFrom([id, aConstraints,
                                       aSources, aIsChrome]() mutable {
     auto& sources = **aSources;
 
     // Since the advanced part of the constraints algorithm needs to know when
     // a candidate set is overconstrained (zero members), we must split up the
     // list into videos and audios, and put it back together again at the end.
 
-    nsTArray<RefPtr<VideoDevice>> videos;
-    nsTArray<RefPtr<AudioDevice>> audios;
+    nsTArray<RefPtr<MediaDevice>> videos;
+    nsTArray<RefPtr<MediaDevice>> audios;
 
     for (auto& source : sources) {
       if (source->mIsVideo) {
-        RefPtr<VideoDevice> video = static_cast<VideoDevice*>(source.get());
-        videos.AppendElement(video);
+        videos.AppendElement(source);
       } else {
-        RefPtr<AudioDevice> audio = static_cast<AudioDevice*>(source.get());
-        audios.AppendElement(audio);
+        audios.AppendElement(source);
       }
     }
     sources.Clear();
     const char* badConstraint = nullptr;
     bool needVideo = IsOn(aConstraints.mVideo);
     bool needAudio = IsOn(aConstraints.mAudio);
 
     if (needVideo && videos.Length()) {
@@ -1514,31 +1510,31 @@ public:
 
     if (mAudioDevice) {
       auto& constraints = GetInvariant(mConstraints.mAudio);
       rv = mAudioDevice->Allocate(constraints, mPrefs, mPrincipalInfo,
                                   &badConstraint);
       if (NS_FAILED(rv)) {
         errorMsg = "Failed to allocate audiosource";
         if (rv == NS_ERROR_NOT_AVAILABLE && !badConstraint) {
-          nsTArray<RefPtr<AudioDevice>> audios;
+          nsTArray<RefPtr<MediaDevice>> audios;
           audios.AppendElement(mAudioDevice);
           badConstraint = MediaConstraintsHelper::SelectSettings(
               NormalizedConstraints(constraints), audios, mIsChrome);
         }
       }
     }
     if (!errorMsg && mVideoDevice) {
       auto& constraints = GetInvariant(mConstraints.mVideo);
       rv = mVideoDevice->Allocate(constraints, mPrefs, mPrincipalInfo,
                                   &badConstraint);
       if (NS_FAILED(rv)) {
         errorMsg = "Failed to allocate videosource";
         if (rv == NS_ERROR_NOT_AVAILABLE && !badConstraint) {
-          nsTArray<RefPtr<VideoDevice>> videos;
+          nsTArray<RefPtr<MediaDevice>> videos;
           videos.AppendElement(mVideoDevice);
           badConstraint = MediaConstraintsHelper::SelectSettings(
               NormalizedConstraints(constraints), videos, mIsChrome);
         }
         if (mAudioDevice) {
           mAudioDevice->Deallocate();
         }
       }
@@ -1611,25 +1607,25 @@ public:
 
   const MediaStreamConstraints&
   GetConstraints()
   {
     return mConstraints;
   }
 
   nsresult
-  SetAudioDevice(AudioDevice* aAudioDevice)
+  SetAudioDevice(MediaDevice* aAudioDevice)
   {
     mAudioDevice = aAudioDevice;
     mDeviceChosen = true;
     return NS_OK;
   }
 
   nsresult
-  SetVideoDevice(VideoDevice* aVideoDevice)
+  SetVideoDevice(MediaDevice* aVideoDevice)
   {
     mVideoDevice = aVideoDevice;
     mDeviceChosen = true;
     return NS_OK;
   }
 
   uint64_t
   GetWindowID()
@@ -1640,18 +1636,18 @@ public:
 private:
   MediaStreamConstraints mConstraints;
 
   nsMainThreadPtrHandle<nsIDOMGetUserMediaSuccessCallback> mOnSuccess;
   nsMainThreadPtrHandle<nsIDOMGetUserMediaErrorCallback> mOnFailure;
   uint64_t mWindowID;
   RefPtr<GetUserMediaWindowListener> mWindowListener;
   RefPtr<SourceListener> mSourceListener;
-  RefPtr<AudioDevice> mAudioDevice;
-  RefPtr<VideoDevice> mVideoDevice;
+  RefPtr<MediaDevice> mAudioDevice;
+  RefPtr<MediaDevice> mVideoDevice;
   MediaEnginePrefs mPrefs;
   ipc::PrincipalInfo mPrincipalInfo;
   bool mIsChrome;
 
   bool mDeviceChosen;
 public:
   nsAutoPtr<MediaManager::SourceSet> mSourceSet;
 private:
@@ -1730,29 +1726,27 @@ MediaManager::EnumerateRawDevices(uint64
       MediaManager* manager = MediaManager::GetIfExists();
       MOZ_RELEASE_ASSERT(manager); // Must exist while media thread is alive
       realBackend = manager->GetBackend(aWindowId);
     }
 
     auto result = MakeUnique<SourceSet>();
 
     if (hasVideo) {
-      nsTArray<RefPtr<VideoDevice>> videos;
+      nsTArray<RefPtr<MediaDevice>> videos;
       GetSources(fakeCams? fakeBackend : realBackend, aVideoType,
-                 &MediaEngine::EnumerateVideoDevices, videos,
-                 videoLoopDev.get());
+                 videos, videoLoopDev.get());
       for (auto& source : videos) {
         result->AppendElement(source);
       }
     }
     if (hasAudio) {
-      nsTArray<RefPtr<AudioDevice>> audios;
+      nsTArray<RefPtr<MediaDevice>> audios;
       GetSources(fakeMics? fakeBackend : realBackend, aAudioType,
-                 &MediaEngine::EnumerateAudioDevices, audios,
-                 audioLoopDev.get());
+                 audios, audioLoopDev.get());
       for (auto& source : audios) {
         result->AppendElement(source);
       }
     }
     SourceSet* handoff = result.release();
     NS_DispatchToMainThread(NewRunnableFrom([id, handoff]() mutable {
       UniquePtr<SourceSet> result(handoff); // grab result
       MediaManager* mgr = MediaManager::GetIfExists();
@@ -1789,17 +1783,17 @@ MediaManager::EnumerateRawDevices(uint64
 }
 
 MediaManager::MediaManager()
   : mMediaThread(nullptr)
   , mBackend(nullptr) {
   mPrefs.mFreq         = 1000; // 1KHz test tone
   mPrefs.mWidth        = 0; // adaptive default
   mPrefs.mHeight       = 0; // adaptive default
-  mPrefs.mFPS          = MediaEngine::DEFAULT_VIDEO_FPS;
+  mPrefs.mFPS          = MediaEnginePrefs::DEFAULT_VIDEO_FPS;
   mPrefs.mAecOn        = false;
   mPrefs.mAgcOn        = false;
   mPrefs.mNoiseOn      = false;
   mPrefs.mExtendedFilter = true;
   mPrefs.mDelayAgnostic = true;
   mPrefs.mFakeDeviceChangeEventOn = false;
 #ifdef MOZ_WEBRTC
   mPrefs.mAec          = webrtc::kEcUnchanged;
@@ -2649,22 +2643,25 @@ MediaManager::GetUserMedia(nsPIDOMWindow
   });
   return NS_OK;
 }
 
 /* static */ void
 MediaManager::AnonymizeDevices(SourceSet& aDevices, const nsACString& aOriginKey)
 {
   if (!aOriginKey.IsEmpty()) {
-    for (auto& device : aDevices) {
+    for (RefPtr<MediaDevice>& device : aDevices) {
       nsString id;
       device->GetId(id);
-      device->SetRawId(id);
+      nsString rawId(id);
       AnonymizeId(id, aOriginKey);
-      device->SetId(id);
+      device = new MediaDevice(device->mSource,
+                               device->mName,
+                               id,
+                               rawId);
     }
   }
 }
 
 /* static */ nsresult
 MediaManager::AnonymizeId(nsAString& aId, const nsACString& aOriginKey)
 {
   MOZ_ASSERT(NS_IsMainThread());
@@ -3296,32 +3293,34 @@ MediaManager::Observe(nsISupports* aSubj
       uint32_t len = 0;
       array->GetLength(&len);
       bool videoFound = false, audioFound = false;
       for (uint32_t i = 0; i < len; i++) {
         nsCOMPtr<nsIMediaDevice> device;
         array->QueryElementAt(i, NS_GET_IID(nsIMediaDevice),
                               getter_AddRefs(device));
         MOZ_ASSERT(device); // shouldn't be returning anything else...
-        if (device) {
-          nsString type;
-          device->GetType(type);
-          if (type.EqualsLiteral("video")) {
-            if (!videoFound) {
-              task->SetVideoDevice(static_cast<VideoDevice*>(device.get()));
-              videoFound = true;
-            }
-          } else if (type.EqualsLiteral("audio")) {
-            if (!audioFound) {
-              task->SetAudioDevice(static_cast<AudioDevice*>(device.get()));
-              audioFound = true;
-            }
-          } else {
-            NS_WARNING("Unknown device type in getUserMedia");
+        if (!device) {
+          continue;
+        }
+
+        nsString type;
+        device->GetType(type);
+        if (type.EqualsLiteral("video")) {
+          if (!videoFound) {
+            task->SetVideoDevice(static_cast<MediaDevice*>(device.get()));
+            videoFound = true;
           }
+        } else if (type.EqualsLiteral("audio")) {
+          if (!audioFound) {
+            task->SetAudioDevice(static_cast<MediaDevice*>(device.get()));
+            audioFound = true;
+          }
+        } else {
+          NS_WARNING("Unknown device type in getUserMedia");
         }
       }
       bool needVideo = IsOn(task->GetConstraints().mVideo);
       bool needAudio = IsOn(task->GetConstraints().mAudio);
       MOZ_ASSERT(needVideo || needAudio);
 
       if ((needVideo && !videoFound) || (needAudio && !audioFound)) {
         task->Denied(NS_LITERAL_STRING("NotAllowedError"));
@@ -3640,16 +3639,30 @@ SourceListener::SourceListener()
   , mRemoved(false)
   , mAudioStopped(false)
   , mVideoStopped(false)
   , mMainThreadCheck(nullptr)
   , mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
   , mWindowListener(nullptr)
 {}
 
+MediaDevice*
+SourceListener::GetDevice(TrackID aTrackID) const
+{
+  switch (aTrackID) {
+    case kAudioTrack:
+      return mAudioDevice;
+    case kVideoTrack:
+      return mVideoDevice;
+    default:
+      MOZ_ASSERT(false, "Unknown track id");
+      return nullptr;
+  }
+}
+
 void
 SourceListener::Register(GetUserMediaWindowListener* aListener)
 {
   LOG(("SourceListener %p registering with window listener %p", this, aListener));
 
   if (mWindowListener) {
     MOZ_ASSERT(false, "Already registered");
     return;
@@ -3663,18 +3676,18 @@ SourceListener::Register(GetUserMediaWin
     return;
   }
   mPrincipalHandle = aListener->GetPrincipalHandle();
   mWindowListener = aListener;
 }
 
 void
 SourceListener::Activate(SourceMediaStream* aStream,
-                         AudioDevice* aAudioDevice,
-                         VideoDevice* aVideoDevice)
+                         MediaDevice* aAudioDevice,
+                         MediaDevice* aVideoDevice)
 {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
 
   LOG(("SourceListener %p activating audio=%p video=%p", this, aAudioDevice, aVideoDevice));
 
   if (mStopped) {
     MOZ_ASSERT(false, "Cannot activate stopped source listener");
     return;
@@ -3797,19 +3810,18 @@ SourceListener::StopTrack(TrackID aTrack
       break;
     }
     default: {
       MOZ_ASSERT(false, "Unknown track id");
       return;
     }
   }
 
-  RefPtr<SourceMediaStream> source = mStream;
-  MediaManager::PostTask(NewTaskFrom([device, source, aTrackID]() {
-    device->GetSource()->Stop(source, aTrackID);
+  MediaManager::PostTask(NewTaskFrom([device, stream = mStream, aTrackID]() {
+    device->Stop(stream, aTrackID);
     device->Deallocate();
   }));
 
   if ((!mAudioDevice || mAudioStopped) &&
       (!mVideoDevice || mVideoStopped)) {
     LOG(("SourceListener %p this was the last track stopped", this));
     Stop();
   }
@@ -3953,52 +3965,30 @@ SourceListener::StopSharing()
 
 SourceMediaStream*
 SourceListener::GetSourceStream()
 {
   NS_ASSERTION(mStream,"Getting stream from never-activated SourceListener");
   return mStream;
 }
 
-void
-SourceListener::GetSettings(dom::MediaTrackSettings& aOutSettings, TrackID aTrackID)
-{
-  switch (aTrackID) {
-    case kVideoTrack: {
-      if (mVideoDevice) {
-        mVideoDevice->GetSource()->GetSettings(aOutSettings);
-      }
-      break;
-    }
-    case kAudioTrack: {
-      if (mAudioDevice) {
-        mAudioDevice->GetSource()->GetSettings(aOutSettings);
-      }
-      break;
-    }
-    default: {
-      MOZ_ASSERT(false, "Unknown track id");
-    }
-  }
-}
-
 // Proxy NotifyPull() to sources
 void
 SourceListener::NotifyPull(MediaStreamGraph* aGraph,
                            StreamTime aDesiredTime)
 {
   // Currently audio sources ignore NotifyPull, but they could
   // watch it especially for fake audio.
   if (mAudioDevice) {
-    mAudioDevice->GetSource()->NotifyPull(aGraph, mStream, kAudioTrack,
-                                          aDesiredTime, mPrincipalHandle);
+    mAudioDevice->Pull(mStream, kAudioTrack,
+                       aDesiredTime, mPrincipalHandle);
   }
   if (mVideoDevice) {
-    mVideoDevice->GetSource()->NotifyPull(aGraph, mStream, kVideoTrack,
-                                          aDesiredTime, mPrincipalHandle);
+    mVideoDevice->Pull(mStream, kVideoTrack,
+                       aDesiredTime, mPrincipalHandle);
   }
 }
 
 void
 SourceListener::NotifyEvent(MediaStreamGraph* aGraph,
                             MediaStreamGraphEvent aEvent)
 {
   nsCOMPtr<nsIEventTarget> target;
@@ -4067,83 +4057,83 @@ SourceListener::NotifyRemoved()
   mWindowListener = nullptr;
 }
 
 bool
 SourceListener::CapturingVideo() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDevice && !mVideoStopped &&
-         !mVideoDevice->GetSource()->IsAvailable() &&
+         !mVideoDevice->mSource->IsAvailable() &&
          mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Camera &&
-         (!mVideoDevice->GetSource()->IsFake() ||
+         (!mVideoDevice->mSource->IsFake() ||
           Preferences::GetBool("media.navigator.permission.fake"));
 }
 
 bool
 SourceListener::CapturingAudio() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mAudioDevice && !mAudioStopped &&
-         !mAudioDevice->GetSource()->IsAvailable() &&
-         (!mAudioDevice->GetSource()->IsFake() ||
+         !mAudioDevice->mSource->IsAvailable() &&
+         (!mAudioDevice->mSource->IsFake() ||
           Preferences::GetBool("media.navigator.permission.fake"));
 }
 
 bool
 SourceListener::CapturingScreen() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDevice && !mVideoStopped &&
-         !mVideoDevice->GetSource()->IsAvailable() &&
+         !mVideoDevice->mSource->IsAvailable() &&
          mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Screen;
 }
 
 bool
 SourceListener::CapturingWindow() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDevice && !mVideoStopped &&
-         !mVideoDevice->GetSource()->IsAvailable() &&
+         !mVideoDevice->mSource->IsAvailable() &&
          mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Window;
 }
 
 bool
 SourceListener::CapturingApplication() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDevice && !mVideoStopped &&
-         !mVideoDevice->GetSource()->IsAvailable() &&
+         !mVideoDevice->mSource->IsAvailable() &&
          mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Application;
 }
 
 bool
 SourceListener::CapturingBrowser() const
 {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDevice && !mVideoStopped &&
-         !mVideoDevice->GetSource()->IsAvailable() &&
+         !mVideoDevice->mSource->IsAvailable() &&
          mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Browser;
 }
 
 already_AddRefed<PledgeVoid>
 SourceListener::ApplyConstraintsToTrack(
     nsPIDOMWindowInner* aWindow,
     TrackID aTrackID,
     const MediaTrackConstraints& aConstraintsPassedIn,
     dom::CallerType aCallerType)
 {
   MOZ_ASSERT(NS_IsMainThread());
   RefPtr<PledgeVoid> p = new PledgeVoid();
 
   // XXX to support multiple tracks of a type in a stream, this should key off
   // the TrackID and not just the type
-  RefPtr<AudioDevice> audioDevice =
+  RefPtr<MediaDevice> audioDevice =
     aTrackID == kAudioTrack ? mAudioDevice.get() : nullptr;
-  RefPtr<VideoDevice> videoDevice =
+  RefPtr<MediaDevice> videoDevice =
     aTrackID == kVideoTrack ? mVideoDevice.get() : nullptr;
 
   if (mStopped || (!audioDevice && !videoDevice))
   {
     LOG(("gUM track %d applyConstraints, but we don't have type %s",
          aTrackID, aTrackID == kAudioTrack ? "audio" : "video"));
     p->Resolve(false);
     return p.forget();
@@ -4172,27 +4162,27 @@ SourceListener::ApplyConstraintsToTrack(
                                       c, isChrome]() mutable {
     MOZ_ASSERT(MediaManager::IsInMediaThread());
     MediaManager* mgr = MediaManager::GetIfExists();
     MOZ_RELEASE_ASSERT(mgr); // Must exist while media thread is alive
     const char* badConstraint = nullptr;
     nsresult rv = NS_OK;
 
     if (audioDevice) {
-      rv = audioDevice->Restart(c, mgr->mPrefs, &badConstraint);
+      rv = audioDevice->Reconfigure(c, mgr->mPrefs, &badConstraint);
       if (rv == NS_ERROR_NOT_AVAILABLE && !badConstraint) {
-        nsTArray<RefPtr<AudioDevice>> audios;
+        nsTArray<RefPtr<MediaDevice>> audios;
         audios.AppendElement(audioDevice);
         badConstraint = MediaConstraintsHelper::SelectSettings(
             NormalizedConstraints(c), audios, isChrome);
       }
     } else {
-      rv = videoDevice->Restart(c, mgr->mPrefs, &badConstraint);
+      rv = videoDevice->Reconfigure(c, mgr->mPrefs, &badConstraint);
       if (rv == NS_ERROR_NOT_AVAILABLE && !badConstraint) {
-        nsTArray<RefPtr<VideoDevice>> videos;
+        nsTArray<RefPtr<MediaDevice>> videos;
         videos.AppendElement(videoDevice);
         badConstraint = MediaConstraintsHelper::SelectSettings(
             NormalizedConstraints(c), videos, isChrome);
       }
     }
     NS_DispatchToMainThread(NewRunnableFrom([id, windowId, rv,
                                              badConstraint]() mutable {
       MOZ_ASSERT(NS_IsMainThread());
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -1,16 +1,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_MEDIAMANAGER_H
 #define MOZILLA_MEDIAMANAGER_H
 
 #include "MediaEngine.h"
+#include "MediaEnginePrefs.h"
 #include "mozilla/media/DeviceChangeCallback.h"
 #include "mozilla/dom/GetUserMediaRequest.h"
 #include "mozilla/Services.h"
 #include "mozilla/Unused.h"
 #include "nsAutoPtr.h"
 #include "nsIMediaManager.h"
 
 #include "nsHashKeys.h"
@@ -49,87 +50,84 @@ struct MediaTrackConstraints;
 struct MediaTrackConstraintSet;
 enum class CallerType : uint32_t;
 } // namespace dom
 
 namespace ipc {
 class PrincipalInfo;
 }
 
+class AllocationHandle;
 class GetUserMediaTask;
 class GetUserMediaWindowListener;
 class MediaManager;
 class SourceListener;
 
+LogModule* GetMediaManagerLog();
+
 class MediaDevice : public nsIMediaDevice
 {
 public:
-  typedef MediaEngineSource Source;
-
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEDIADEVICE
 
-  void SetId(const nsAString& aID);
-  void SetRawId(const nsAString& aID);
-  virtual uint32_t GetBestFitnessDistance(
+  explicit MediaDevice(MediaEngineSource* aSource,
+                       const nsString& aName,
+                       const nsString& aID,
+                       const nsString& aRawID = NS_LITERAL_STRING(""));
+
+  uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       bool aIsChrome);
-  virtual Source* GetSource() = 0;
-  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                    const MediaEnginePrefs &aPrefs,
+
+  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
+                    const MediaEnginePrefs& aPrefs,
                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                     const char** aOutBadConstraint);
-  nsresult Restart(const dom::MediaTrackConstraints &aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const char** aOutBadConstraint);
+  nsresult Start(SourceMediaStream* aStream,
+                 TrackID aTrackID,
+                 const PrincipalHandle& aPrincipal);
+  nsresult Reconfigure(const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const char** aOutBadConstraint);
+  nsresult Stop(SourceMediaStream* aStream,
+                TrackID aTrackID);
   nsresult Deallocate();
+
+  void Pull(const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipal);
+
+  void GetSettings(dom::MediaTrackSettings& aOutSettings) const;
+
+  dom::MediaSourceEnum GetMediaSource() const;
 protected:
-  virtual ~MediaDevice() {}
-  explicit MediaDevice(MediaEngineSource* aSource, bool aIsVideo);
+  virtual ~MediaDevice() = default;
 
   static uint32_t FitnessDistance(nsString aN,
     const dom::OwningStringOrStringSequenceOrConstrainDOMStringParameters& aConstraint);
 private:
   static bool StringsContain(const dom::OwningStringOrStringSequence& aStrings,
                              nsString aN);
   static uint32_t FitnessDistance(nsString aN,
       const dom::ConstrainDOMStringParameters& aParams);
-protected:
-  nsString mName;
-  nsString mID;
-  nsString mRawID;
-  bool mScary;
-  dom::MediaSourceEnum mMediaSource;
-  RefPtr<MediaEngineSource> mSource;
-  RefPtr<MediaEngineSource::AllocationHandle> mAllocationHandle;
-public:
-  dom::MediaSourceEnum GetMediaSource() {
-    return mMediaSource;
-  }
-  bool mIsVideo;
-};
+
+  // Assigned on allocation on media thread, then read on the media thread and
+  // graph thread
+  RefPtr<AllocationHandle> mAllocationHandle;
 
-class VideoDevice : public MediaDevice
-{
 public:
-  typedef MediaEngineVideoSource Source;
-
-  explicit VideoDevice(Source* aSource);
-  NS_IMETHOD GetType(nsAString& aType) override;
-  Source* GetSource() override;
-};
-
-class AudioDevice : public MediaDevice
-{
-public:
-  typedef MediaEngineAudioSource Source;
-
-  explicit AudioDevice(Source* aSource);
-  NS_IMETHOD GetType(nsAString& aType) override;
-  Source* GetSource() override;
+  const RefPtr<MediaEngineSource> mSource;
+  const bool mIsVideo;
+  const bool mScary;
+  const nsString mType;
+  const nsString mName;
+  const nsString mID;
+  const nsString mRawID;
 };
 
 typedef nsRefPtrHashtable<nsUint64HashKey, GetUserMediaWindowListener> WindowTable;
 
 // we could add MediaManager if needed
 typedef void (*WindowListenerCallback)(MediaManager *aThis,
                                        uint64_t aWindowID,
                                        GetUserMediaWindowListener *aListener,
--- a/dom/media/imagecapture/ImageCapture.cpp
+++ b/dom/media/imagecapture/ImageCapture.cpp
@@ -9,17 +9,17 @@
 #include "mozilla/dom/DOMException.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/ImageCaptureError.h"
 #include "mozilla/dom/ImageCaptureErrorEvent.h"
 #include "mozilla/dom/ImageCaptureErrorEventBinding.h"
 #include "mozilla/dom/VideoStreamTrack.h"
 #include "nsIDocument.h"
 #include "CaptureTask.h"
-#include "MediaEngine.h"
+#include "MediaEngineSource.h"
 
 namespace mozilla {
 
 LogModule* GetICLog()
 {
   static LazyLogModule log("ImageCapture");
   return log;
 }
--- a/dom/media/systemservices/CamerasChild.cpp
+++ b/dom/media/systemservices/CamerasChild.cpp
@@ -687,31 +687,16 @@ CamerasChild::SetFakeDeviceChangeEvents(
   // To simulate the devicechange event in mochitest,
   // we fire a fake devicechange event in Camera IPC thread periodically
   RefPtr<FakeOnDeviceChangeEventRunnable> evt = new FakeOnDeviceChangeEventRunnable(0);
   CamerasSingleton::FakeDeviceChangeEventThread()->Dispatch(evt.forget(), NS_DISPATCH_NORMAL);
 
   return 0;
 }
 
-mozilla::ipc::IPCResult
-CamerasChild::RecvFrameSizeChange(const CaptureEngine& capEngine,
-                                  const int& capId,
-                                  const int& w, const int& h)
-{
-  LOG((__PRETTY_FUNCTION__));
-  MutexAutoLock lock(mCallbackMutex);
-  if (Callback(capEngine, capId)) {
-    Callback(capEngine, capId)->FrameSizeChange(w, h);
-  } else {
-    LOG(("Frame size change with dead callback"));
-  }
-  return IPC_OK();
-}
-
 void
 CamerasChild::ActorDestroy(ActorDestroyReason aWhy)
 {
   MonitorAutoLock monitor(mReplyMonitor);
   mIPCIsAlive = false;
   // Hopefully prevent us from getting stuck
   // on replies that'll never come.
   monitor.NotifyAll();
--- a/dom/media/systemservices/CamerasChild.h
+++ b/dom/media/systemservices/CamerasChild.h
@@ -29,17 +29,16 @@ class PrincipalInfo;
 }
 
 namespace camera {
 
 class FrameRelay {
 public:
   virtual int DeliverFrame(uint8_t* buffer,
     const mozilla::camera::VideoFrameProperties& props) = 0;
-  virtual void FrameSizeChange(unsigned int w, unsigned int h) = 0;
 };
 
 struct CapturerElement {
   CaptureEngine engine;
   int id;
   FrameRelay* callback;
 };
 
@@ -151,18 +150,16 @@ public:
   // takes a non-owning reference.
   NS_INLINE_DECL_REFCOUNTING(CamerasChild)
 
   // IPC messages recevied, received on the PBackground thread
   // these are the actual callbacks with data
   mozilla::ipc::IPCResult RecvDeliverFrame(const CaptureEngine&, const int&,
                                            mozilla::ipc::Shmem&&,
                                            const VideoFrameProperties & prop) override;
-  mozilla::ipc::IPCResult RecvFrameSizeChange(const CaptureEngine&, const int&,
-                                              const int& w, const int& h) override;
 
   mozilla::ipc::IPCResult RecvDeviceChange() override;
   int AddDeviceChangeCallback(DeviceChangeCallback* aCallback) override;
   int SetFakeDeviceChangeEvents();
 
   // these are response messages to our outgoing requests
   mozilla::ipc::IPCResult RecvReplyNumberOfCaptureDevices(const int&) override;
   mozilla::ipc::IPCResult RecvReplyNumberOfCapabilities(const int&) override;
--- a/dom/media/systemservices/CamerasParent.cpp
+++ b/dom/media/systemservices/CamerasParent.cpp
@@ -1,16 +1,16 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set sw=2 ts=8 et ft=cpp : */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CamerasParent.h"
-#include "MediaEngine.h"
+#include "MediaEngineSource.h"
 #include "MediaUtils.h"
 #include "VideoFrameUtils.h"
 
 #include "mozilla/Assertions.h"
 #include "mozilla/Unused.h"
 #include "mozilla/Services.h"
 #include "mozilla/Logging.h"
 #include "mozilla/ipc/BackgroundParent.h"
--- a/dom/media/systemservices/PCameras.ipdl
+++ b/dom/media/systemservices/PCameras.ipdl
@@ -50,17 +50,16 @@ struct VideoFrameProperties
   int vStride;
 };
 
 async protocol PCameras
 {
   manager PBackground;
 
 child:
-  async FrameSizeChange(CaptureEngine capEngine, int cap_id, int w, int h);
   // transfers ownership of |buffer| from parent to child
   async DeliverFrame(CaptureEngine capEngine, int streamId,
                      Shmem buffer, VideoFrameProperties props);
   async DeviceChange();
   async ReplyNumberOfCaptureDevices(int numdev);
   async ReplyNumberOfCapabilities(int numdev);
   async ReplyAllocateCaptureDevice(int numdev);
   async ReplyGetCaptureCapability(VideoCaptureCapability cap);
new file mode 100644
--- /dev/null
+++ b/dom/media/webrtc/AllocationHandle.h
@@ -0,0 +1,65 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef AllocationHandle_h
+#define AllocationHandle_h
+
+#include <cstdint>
+#include <limits>
+
+#include "MediaEnginePrefs.h"
+#include "MediaManager.h"
+#include "MediaTrackConstraints.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/ipc/PBackgroundSharedTypes.h"
+#include "nsString.h"
+
+namespace mozilla {
+
+/**
+ * AllocationHandle helps keep track of metadata for allocations of shared
+ * MediaEngineSources. That is, for MediaEngineSources that support more than
+ * one concurrent allocation.
+ */
+class AllocationHandle
+{
+  ~AllocationHandle() = default;
+
+public:
+  static uint64_t GetUniqueId()
+  {
+    static uint64_t sId = 0;
+
+    MOZ_ASSERT(MediaManager::GetIfExists());
+    MOZ_ASSERT(MediaManager::GetIfExists()->IsInMediaThread());
+    MOZ_RELEASE_ASSERT(sId < std::numeric_limits<decltype(sId)>::max());
+    return sId++;
+  }
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AllocationHandle);
+
+  AllocationHandle() = delete;
+  AllocationHandle(const dom::MediaTrackConstraints& aConstraints,
+                   const ipc::PrincipalInfo& aPrincipalInfo,
+                   const MediaEnginePrefs& aPrefs,
+                   const nsString& aDeviceId)
+    : mId(GetUniqueId())
+    , mDeviceId(aDeviceId)
+    , mPrincipalInfo(aPrincipalInfo)
+    , mConstraints(aConstraints)
+    , mPrefs(aPrefs)
+  {}
+
+  const uint64_t mId;
+  const nsString mDeviceId;
+  const ipc::PrincipalInfo mPrincipalInfo;
+  NormalizedConstraints mConstraints;
+  MediaEnginePrefs mPrefs;
+};
+
+} // namespace mozilla
+
+#endif // AllocationHandle_h
--- a/dom/media/webrtc/MediaEngine.h
+++ b/dom/media/webrtc/MediaEngine.h
@@ -1,481 +1,60 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MEDIAENGINE_H_
 #define MEDIAENGINE_H_
 
-#include "mozilla/RefPtr.h"
 #include "DOMMediaStream.h"
 #include "MediaStreamGraph.h"
 #include "MediaTrackConstraints.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "mozilla/dom/VideoStreamTrack.h"
-#include "mozilla/ipc/PBackgroundSharedTypes.h"
 #include "mozilla/media/DeviceChangeCallback.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/ThreadSafeWeakPtr.h"
 
 namespace mozilla {
 
 namespace dom {
 class Blob;
 } // namespace dom
 
+class AllocationHandle;
+class MediaEngineSource;
+
 enum {
   kVideoTrack = 1,
   kAudioTrack = 2,
   kTrackCount
 };
 
-/**
- * Abstract interface for managing audio and video devices. Each platform
- * must implement a concrete class that will map these classes and methods
- * to the appropriate backend. For example, on Desktop platforms, these will
- * correspond to equivalent webrtc (GIPS) calls.
- */
-class MediaEngineVideoSource;
-class MediaEngineAudioSource;
-
-enum MediaEngineState {
-  kAllocated,
-  kStarted,
-  kStopped,
-  kReleased
-};
-
 class MediaEngine : public DeviceChangeCallback
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEngine)
+  NS_DECL_OWNINGTHREAD
 
-  static const int DEFAULT_VIDEO_FPS = 30;
-  static const int DEFAULT_43_VIDEO_WIDTH = 640;
-  static const int DEFAULT_43_VIDEO_HEIGHT = 480;
-  static const int DEFAULT_169_VIDEO_WIDTH = 1280;
-  static const int DEFAULT_169_VIDEO_HEIGHT = 720;
+  void AssertIsOnOwningThread() const
+  {
+    NS_ASSERT_OWNINGTHREAD(MediaEngine);
+  }
 
-  /* Populate an array of video sources in the nsTArray. Also include devices
-   * that are currently unavailable. */
-  virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
-                                     nsTArray<RefPtr<MediaEngineVideoSource> >*) = 0;
-
-  /* Populate an array of audio sources in the nsTArray. Also include devices
-   * that are currently unavailable. */
-  virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
-                                     nsTArray<RefPtr<MediaEngineAudioSource> >*) = 0;
+  /**
+   * Populate an array of sources of the requested type in the nsTArray.
+   * Also include devices that are currently unavailable.
+   */
+  virtual void EnumerateDevices(dom::MediaSourceEnum,
+                                nsTArray<RefPtr<MediaEngineSource>>*) = 0;
 
   virtual void Shutdown() = 0;
 
   virtual void SetFakeDeviceChangeEvents() {}
 
 protected:
   virtual ~MediaEngine() {}
 };
 
-/**
- * Video source and friends.
- */
-class MediaEnginePrefs {
-public:
-  MediaEnginePrefs()
-    : mWidth(0)
-    , mHeight(0)
-    , mFPS(0)
-    , mFreq(0)
-    , mAecOn(false)
-    , mAgcOn(false)
-    , mNoiseOn(false)
-    , mAec(0)
-    , mAgc(0)
-    , mNoise(0)
-    , mFullDuplex(false)
-    , mExtendedFilter(false)
-    , mDelayAgnostic(false)
-    , mFakeDeviceChangeEventOn(false)
-    , mChannels(0)
-  {}
-
-  int32_t mWidth;
-  int32_t mHeight;
-  int32_t mFPS;
-  int32_t mFreq; // for test tones (fake:true)
-  bool mAecOn;
-  bool mAgcOn;
-  bool mNoiseOn;
-  int32_t mAec;
-  int32_t mAgc;
-  int32_t mNoise;
-  bool mFullDuplex;
-  bool mExtendedFilter;
-  bool mDelayAgnostic;
-  bool mFakeDeviceChangeEventOn;
-  int32_t mChannels;
-
-  // mWidth and/or mHeight may be zero (=adaptive default), so use functions.
-
-  int32_t GetWidth(bool aHD = false) const {
-    return mWidth? mWidth : (mHeight?
-                             (mHeight * GetDefWidth(aHD)) / GetDefHeight(aHD) :
-                             GetDefWidth(aHD));
-  }
-
-  int32_t GetHeight(bool aHD = false) const {
-    return mHeight? mHeight : (mWidth?
-                               (mWidth * GetDefHeight(aHD)) / GetDefWidth(aHD) :
-                               GetDefHeight(aHD));
-  }
-private:
-  static int32_t GetDefWidth(bool aHD = false) {
-    // It'd be nice if we could use the ternary operator here, but we can't
-    // because of bug 1002729.
-    if (aHD) {
-      return MediaEngine::DEFAULT_169_VIDEO_WIDTH;
-    }
-
-    return MediaEngine::DEFAULT_43_VIDEO_WIDTH;
-  }
-
-  static int32_t GetDefHeight(bool aHD = false) {
-    // It'd be nice if we could use the ternary operator here, but we can't
-    // because of bug 1002729.
-    if (aHD) {
-      return MediaEngine::DEFAULT_169_VIDEO_HEIGHT;
-    }
-
-    return MediaEngine::DEFAULT_43_VIDEO_HEIGHT;
-  }
-};
-
-/**
- * Callback interface for TakePhoto(). Either PhotoComplete() or PhotoError()
- * should be called.
- */
-class MediaEnginePhotoCallback {
-public:
-  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEnginePhotoCallback)
-
-  // aBlob is the image captured by MediaEngineSource. It is
-  // called on main thread.
-  virtual nsresult PhotoComplete(already_AddRefed<dom::Blob> aBlob) = 0;
-
-  // It is called on main thread. aRv is the error code.
-  virtual nsresult PhotoError(nsresult aRv) = 0;
-
-protected:
-  virtual ~MediaEnginePhotoCallback() {}
-};
-
-/**
- * Common abstract base class for audio and video sources.
- *
- * By default, the base class implements Allocate and Deallocate using its
- * UpdateSingleSource pattern, which manages allocation handles and calculates
- * net constraints from competing allocations and updates a single shared device.
- *
- * Classes that don't operate as a single shared device can override Allocate
- * and Deallocate and simply not pass the methods up.
- */
-class MediaEngineSource : public nsISupports,
-                          protected MediaConstraintsHelper
-{
-public:
-  // code inside webrtc.org assumes these sizes; don't use anything smaller
-  // without verifying it's ok
-  static const unsigned int kMaxDeviceNameLength = 128;
-  static const unsigned int kMaxUniqueIdLength = 256;
-
-  virtual ~MediaEngineSource()
-  {
-    if (!mInShutdown) {
-      Shutdown();
-    }
-  }
-
-  virtual void Shutdown()
-  {
-    mInShutdown = true;
-  };
-
-  /* Populate the human readable name of this device in the nsAString */
-  virtual void GetName(nsAString&) const = 0;
-
-  /* Populate the UUID of this device in the nsACString */
-  virtual void GetUUID(nsACString&) const = 0;
-
-  /* Override w/true if source does end-run around cross origin restrictions. */
-  virtual bool GetScary() const { return false; };
-
-  class AllocationHandle
-  {
-  public:
-    NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AllocationHandle);
-  protected:
-    ~AllocationHandle() {}
-    static uint64_t sId;
-  public:
-    AllocationHandle(const dom::MediaTrackConstraints& aConstraints,
-                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId)
-
-    : mConstraints(aConstraints),
-      mPrincipalInfo(aPrincipalInfo),
-      mPrefs(aPrefs),
-#ifdef MOZ_WEBRTC
-      mId(sId++),
-#endif
-      mDeviceId(aDeviceId) {}
-  public:
-    NormalizedConstraints mConstraints;
-    mozilla::ipc::PrincipalInfo mPrincipalInfo;
-    MediaEnginePrefs mPrefs;
-    uint64_t mId;
-    nsString mDeviceId;
-  };
-
-  /* Release the device back to the system. */
-  virtual nsresult Deallocate(AllocationHandle* aHandle)
-  {
-    MOZ_ASSERT(aHandle);
-    RefPtr<AllocationHandle> handle = aHandle;
-
-    class Comparator {
-    public:
-      static bool Equals(const RefPtr<AllocationHandle>& a,
-                         const RefPtr<AllocationHandle>& b) {
-        return a.get() == b.get();
-      }
-    };
-
-    auto ix = mRegisteredHandles.IndexOf(handle, 0, Comparator());
-    if (ix == mRegisteredHandles.NoIndex) {
-      MOZ_ASSERT(false);
-      return NS_ERROR_FAILURE;
-    }
-
-    mRegisteredHandles.RemoveElementAt(ix);
-    if (mRegisteredHandles.Length() && !mInShutdown) {
-      // Whenever constraints are removed, other parties may get closer to ideal.
-      auto& first = mRegisteredHandles[0];
-      const char* badConstraint = nullptr;
-      return ReevaluateAllocation(nullptr, nullptr, first->mPrefs,
-                                  first->mDeviceId, &badConstraint);
-    }
-    return NS_OK;
-  }
-
-  /* Start the device and add the track to the provided SourceMediaStream, with
-   * the provided TrackID. You may start appending data to the track
-   * immediately after. */
-  virtual nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) = 0;
-
-  /* Called when the stream wants more data */
-  virtual void NotifyPull(MediaStreamGraph* aGraph,
-                          SourceMediaStream *aSource,
-                          TrackID aId,
-                          StreamTime aDesiredTime,
-                          const PrincipalHandle& aPrincipalHandle) = 0;
-
-  /* Stop the device and release the corresponding MediaStream */
-  virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
-
-  /* Restart with new capability */
-  virtual nsresult Restart(AllocationHandle* aHandle,
-                           const dom::MediaTrackConstraints& aConstraints,
-                           const MediaEnginePrefs &aPrefs,
-                           const nsString& aDeviceId,
-                           const char** aOutBadConstraint) = 0;
-
-  /* Returns true if a source represents a fake capture device and
-   * false otherwise
-   */
-  virtual bool IsFake() = 0;
-
-  /* Returns the type of media source (camera, microphone, screen, window, etc) */
-  virtual dom::MediaSourceEnum GetMediaSource() const = 0;
-
-  /* If implementation of MediaEngineSource supports TakePhoto(), the picture
-   * should be return via aCallback object. Otherwise, it returns NS_ERROR_NOT_IMPLEMENTED.
-   */
-  virtual nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) = 0;
-
-  /* Return false if device is currently allocated or started */
-  bool IsAvailable() {
-    if (mState == kAllocated || mState == kStarted) {
-      return false;
-    } else {
-      return true;
-    }
-  }
-
-  /* It is an error to call Start() before an Allocate(), and Stop() before
-   * a Start(). Only Allocate() may be called after a Deallocate(). */
-
-  /* This call reserves but does not start the device. */
-  virtual nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                            const MediaEnginePrefs &aPrefs,
-                            const nsString& aDeviceId,
-                            const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
-                            AllocationHandle** aOutHandle,
-                            const char** aOutBadConstraint)
-  {
-    AssertIsOnOwningThread();
-    MOZ_ASSERT(aOutHandle);
-    RefPtr<AllocationHandle> handle =
-      new AllocationHandle(aConstraints, aPrincipalInfo, aPrefs, aDeviceId);
-    nsresult rv = ReevaluateAllocation(handle, nullptr, aPrefs, aDeviceId,
-                                       aOutBadConstraint);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    mRegisteredHandles.AppendElement(handle);
-    handle.forget(aOutHandle);
-    return NS_OK;
-  }
-
-  virtual uint32_t GetBestFitnessDistance(
-      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-      const nsString& aDeviceId) const = 0;
-
-  void GetSettings(dom::MediaTrackSettings& aOutSettings)
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-    aOutSettings = *mSettings;
-  }
-
-protected:
-  // Only class' own members can be initialized in constructor initializer list.
-  explicit MediaEngineSource(MediaEngineState aState)
-    : mState(aState)
-    , mInShutdown(false)
-    , mSettings(MakeRefPtr<media::Refcountable<dom::MediaTrackSettings>>())
-  {}
-
-  /* UpdateSingleSource - Centralized abstract function to implement in those
-   * cases where a single device is being shared between users. Should apply net
-   * constraints and restart the device as needed.
-   *
-   * aHandle           - New or existing handle, or null to update after removal.
-   * aNetConstraints   - Net constraints to be applied to the single device.
-   * aPrefs            - As passed in (in case of changes in about:config).
-   * aDeviceId         - As passed in (origin dependent).
-   * aOutBadConstraint - Result: nonzero if failed to apply. Name of culprit.
-   */
-
-  virtual nsresult
-  UpdateSingleSource(const AllocationHandle* aHandle,
-                     const NormalizedConstraints& aNetConstraints,
-                     const NormalizedConstraints& aNewConstraint,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId,
-                     const char** aOutBadConstraint) {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  };
-
-  /* ReevaluateAllocation - Call to change constraints for an allocation of
-   * a single device. Manages allocation handles, calculates net constraints
-   * from all competing allocations, and calls UpdateSingleSource with the net
-   * result, to restart the single device as needed.
-   *
-   * aHandle            - New or existing handle, or null to update after removal.
-   * aConstraintsUpdate - Constraints to be applied to existing handle, or null.
-   * aPrefs             - As passed in (in case of changes from about:config).
-   * aDeviceId          - As passed in (origin-dependent id).
-   * aOutBadConstraint  - Result: nonzero if failed to apply. Name of culprit.
-   */
-
-  nsresult
-  ReevaluateAllocation(AllocationHandle* aHandle,
-                       NormalizedConstraints* aConstraintsUpdate,
-                       const MediaEnginePrefs& aPrefs,
-                       const nsString& aDeviceId,
-                       const char** aOutBadConstraint)
-  {
-    // aHandle and/or aConstraintsUpdate may be nullptr (see below)
-
-    AutoTArray<const NormalizedConstraints*, 10> allConstraints;
-    AutoTArray<const NormalizedConstraints*, 1> updatedConstraint;
-    for (auto& registered : mRegisteredHandles) {
-      if (aConstraintsUpdate && registered.get() == aHandle) {
-        continue; // Don't count old constraints
-      }
-      allConstraints.AppendElement(&registered->mConstraints);
-    }
-    if (aConstraintsUpdate) {
-      allConstraints.AppendElement(aConstraintsUpdate);
-      updatedConstraint.AppendElement(aConstraintsUpdate);
-    } else if (aHandle) {
-      // In the case of AddShareOfSingleSource, the handle isn't registered yet.
-      allConstraints.AppendElement(&aHandle->mConstraints);
-      updatedConstraint.AppendElement(&aHandle->mConstraints);
-    } else {
-      updatedConstraint.AppendElements(allConstraints);
-    }
-
-    NormalizedConstraints netConstraints(allConstraints);
-    if (netConstraints.mBadConstraint) {
-      *aOutBadConstraint = netConstraints.mBadConstraint;
-      return NS_ERROR_FAILURE;
-    }
-
-    NormalizedConstraints newConstraint(updatedConstraint);
-    nsresult rv = UpdateSingleSource(aHandle, netConstraints, newConstraint, aPrefs, aDeviceId,
-                                     aOutBadConstraint);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    if (aHandle && aConstraintsUpdate) {
-      aHandle->mConstraints = *aConstraintsUpdate;
-    }
-    return NS_OK;
-  }
-
-  void AssertIsOnOwningThread()
-  {
-    NS_ASSERT_OWNINGTHREAD(MediaEngineSource);
-  }
-
-  MediaEngineState mState;
-
-  NS_DECL_OWNINGTHREAD
-
-  nsTArray<RefPtr<AllocationHandle>> mRegisteredHandles;
-  bool mInShutdown;
-
-  // The following is accessed on main-thread only. It has its own ref-count to
-  // avoid ref-counting MediaEngineSource itself in runnables.
-  // (MediaEngineSource subclasses balk on ref-counts too late during shutdown.)
-  RefPtr<media::Refcountable<dom::MediaTrackSettings>> mSettings;
-};
-
-class MediaEngineVideoSource : public MediaEngineSource
-{
-public:
-  virtual ~MediaEngineVideoSource() {}
-
-protected:
-  explicit MediaEngineVideoSource(MediaEngineState aState)
-    : MediaEngineSource(aState) {}
-  MediaEngineVideoSource()
-    : MediaEngineSource(kReleased) {}
-};
-
-/**
- * Audio source and friends.
- */
-class MediaEngineAudioSource : public MediaEngineSource,
-                               public AudioDataListenerInterface
-{
-public:
-  virtual ~MediaEngineAudioSource() {}
-
-protected:
-  explicit MediaEngineAudioSource(MediaEngineState aState)
-    : MediaEngineSource(aState) {}
-  MediaEngineAudioSource()
-    : MediaEngineSource(kReleased) {}
-
-};
-
 } // namespace mozilla
 
 #endif /* MEDIAENGINE_H_ */
deleted file mode 100644
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
+++ /dev/null
@@ -1,441 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "MediaEngineCameraVideoSource.h"
-
-#include "mozilla/IntegerPrintfMacros.h"
-
-#include <limits>
-
-namespace mozilla {
-
-using namespace mozilla::gfx;
-using namespace mozilla::dom;
-
-extern LogModule* GetMediaManagerLog();
-#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
-#define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
-
-// guts for appending data to the MSG track
-bool MediaEngineCameraVideoSource::AppendToTrack(SourceMediaStream* aSource,
-                                                 layers::Image* aImage,
-                                                 TrackID aID,
-                                                 StreamTime delta,
-                                                 const PrincipalHandle& aPrincipalHandle)
-{
-  MOZ_ASSERT(aSource);
-
-  VideoSegment segment;
-  RefPtr<layers::Image> image = aImage;
-  IntSize size = image ? image->GetSize() : IntSize(0, 0);
-
-  segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
-
-  // This is safe from any thread, and is safe if the track is Finished
-  // or Destroyed.
-  // This can fail if either a) we haven't added the track yet, or b)
-  // we've removed or finished the track.
-  return aSource->AppendToTrack(aID, &(segment));
-}
-
-// Sub-classes (B2G or desktop) should overload one of both of these two methods
-// to provide capabilities
-size_t
-MediaEngineCameraVideoSource::NumCapabilities() const
-{
-  return mHardcodedCapabilities.Length();
-}
-
-void
-MediaEngineCameraVideoSource::GetCapability(size_t aIndex,
-                                            webrtc::CaptureCapability& aOut) const
-{
-  MOZ_ASSERT(aIndex < mHardcodedCapabilities.Length());
-  aOut = mHardcodedCapabilities.SafeElementAt(aIndex, webrtc::CaptureCapability());
-}
-
-uint32_t
-MediaEngineCameraVideoSource::GetDistance(
-    const webrtc::CaptureCapability& aCandidate,
-    const NormalizedConstraintSet &aConstraints,
-    const nsString& aDeviceId,
-    const DistanceCalculation aCalculate) const
-{
-  if (aCalculate == kFeasibility) {
-    return GetFeasibilityDistance(aCandidate, aConstraints, aDeviceId);
-  }
-  return GetFitnessDistance(aCandidate, aConstraints, aDeviceId);
-}
-
-uint32_t
-MediaEngineCameraVideoSource::GetFitnessDistance(
-    const webrtc::CaptureCapability& aCandidate,
-    const NormalizedConstraintSet &aConstraints,
-    const nsString& aDeviceId) const
-{
-  // Treat width|height|frameRate == 0 on capability as "can do any".
-  // This allows for orthogonal capabilities that are not in discrete steps.
-
-  uint64_t distance =
-    uint64_t(FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
-    uint64_t(FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
-    uint64_t(aCandidate.width? FitnessDistance(int32_t(aCandidate.width),
-                                               aConstraints.mWidth) : 0) +
-    uint64_t(aCandidate.height? FitnessDistance(int32_t(aCandidate.height),
-                                                aConstraints.mHeight) : 0) +
-    uint64_t(aCandidate.maxFPS? FitnessDistance(double(aCandidate.maxFPS),
-                                                aConstraints.mFrameRate) : 0);
-  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
-}
-
-uint32_t
-MediaEngineCameraVideoSource::GetFeasibilityDistance(
-    const webrtc::CaptureCapability& aCandidate,
-    const NormalizedConstraintSet &aConstraints,
-    const nsString& aDeviceId) const
-{
-  // Treat width|height|frameRate == 0 on capability as "can do any".
-  // This allows for orthogonal capabilities that are not in discrete steps.
-
-  uint64_t distance =
-    uint64_t(FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
-    uint64_t(FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
-    uint64_t(aCandidate.width? FeasibilityDistance(int32_t(aCandidate.width),
-                                               aConstraints.mWidth) : 0) +
-    uint64_t(aCandidate.height? FeasibilityDistance(int32_t(aCandidate.height),
-                                                aConstraints.mHeight) : 0) +
-    uint64_t(aCandidate.maxFPS? FeasibilityDistance(double(aCandidate.maxFPS),
-                                                aConstraints.mFrameRate) : 0);
-  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
-}
-
-// Find best capability by removing inferiors. May leave >1 of equal distance
-
-/* static */ void
-MediaEngineCameraVideoSource::TrimLessFitCandidates(CapabilitySet& set) {
-  uint32_t best = UINT32_MAX;
-  for (auto& candidate : set) {
-    if (best > candidate.mDistance) {
-      best = candidate.mDistance;
-    }
-  }
-  for (size_t i = 0; i < set.Length();) {
-    if (set[i].mDistance > best) {
-      set.RemoveElementAt(i);
-    } else {
-      ++i;
-    }
-  }
-  MOZ_ASSERT(set.Length());
-}
-
-// GetBestFitnessDistance returns the best distance the capture device can offer
-// as a whole, given an accumulated number of ConstraintSets.
-// Ideal values are considered in the first ConstraintSet only.
-// Plain values are treated as Ideal in the first ConstraintSet.
-// Plain values are treated as Exact in subsequent ConstraintSets.
-// Infinity = UINT32_MAX e.g. device cannot satisfy accumulated ConstraintSets.
-// A finite result may be used to calculate this device's ranking as a choice.
-
-uint32_t
-MediaEngineCameraVideoSource::GetBestFitnessDistance(
-    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-    const nsString& aDeviceId) const
-{
-  size_t num = NumCapabilities();
-
-  CapabilitySet candidateSet;
-  for (size_t i = 0; i < num; i++) {
-    candidateSet.AppendElement(i);
-  }
-
-  bool first = true;
-  for (const NormalizedConstraintSet* ns : aConstraintSets) {
-    for (size_t i = 0; i < candidateSet.Length();  ) {
-      auto& candidate = candidateSet[i];
-      webrtc::CaptureCapability cap;
-      GetCapability(candidate.mIndex, cap);
-      uint32_t distance = GetFitnessDistance(cap, *ns, aDeviceId);
-      if (distance == UINT32_MAX) {
-        candidateSet.RemoveElementAt(i);
-      } else {
-        ++i;
-        if (first) {
-          candidate.mDistance = distance;
-        }
-      }
-    }
-    first = false;
-  }
-  if (!candidateSet.Length()) {
-    return UINT32_MAX;
-  }
-  TrimLessFitCandidates(candidateSet);
-  return candidateSet[0].mDistance;
-}
-
-void
-MediaEngineCameraVideoSource::LogConstraints(
-    const NormalizedConstraintSet& aConstraints)
-{
-  auto& c = aConstraints;
-  if (c.mWidth.mIdeal.isSome()) {
-    LOG(("Constraints: width: { min: %d, max: %d, ideal: %d }",
-         c.mWidth.mMin, c.mWidth.mMax,
-         c.mWidth.mIdeal.valueOr(0)));
-  } else {
-    LOG(("Constraints: width: { min: %d, max: %d }",
-         c.mWidth.mMin, c.mWidth.mMax));
-  }
-  if (c.mHeight.mIdeal.isSome()) {
-    LOG(("             height: { min: %d, max: %d, ideal: %d }",
-         c.mHeight.mMin, c.mHeight.mMax,
-         c.mHeight.mIdeal.valueOr(0)));
-  } else {
-    LOG(("             height: { min: %d, max: %d }",
-         c.mHeight.mMin, c.mHeight.mMax));
-  }
-  if (c.mFrameRate.mIdeal.isSome()) {
-    LOG(("             frameRate: { min: %f, max: %f, ideal: %f }",
-         c.mFrameRate.mMin, c.mFrameRate.mMax,
-         c.mFrameRate.mIdeal.valueOr(0)));
-  } else {
-    LOG(("             frameRate: { min: %f, max: %f }",
-         c.mFrameRate.mMin, c.mFrameRate.mMax));
-  }
-}
-
-void
-MediaEngineCameraVideoSource::LogCapability(const char* aHeader,
-    const webrtc::CaptureCapability &aCapability, uint32_t aDistance)
-{
-  // RawVideoType and VideoCodecType media/webrtc/trunk/webrtc/common_types.h
-  static const char* const types[] = {
-    "I420",
-    "YV12",
-    "YUY2",
-    "UYVY",
-    "IYUV",
-    "ARGB",
-    "RGB24",
-    "RGB565",
-    "ARGB4444",
-    "ARGB1555",
-    "MJPEG",
-    "NV12",
-    "NV21",
-    "BGRA",
-    "Unknown type"
-  };
-
-  static const char* const codec[] = {
-    "VP8",
-    "VP9",
-    "H264",
-    "I420",
-    "RED",
-    "ULPFEC",
-    "Generic codec",
-    "Unknown codec"
-  };
-
-  LOG(("%s: %4u x %4u x %2u maxFps, %s, %s. Distance = %" PRIu32,
-       aHeader, aCapability.width, aCapability.height, aCapability.maxFPS,
-       types[std::min(std::max(uint32_t(0), uint32_t(aCapability.rawType)),
-                      uint32_t(sizeof(types) / sizeof(*types) - 1))],
-       codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.codecType)),
-                      uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
-       aDistance));
-}
-
-bool
-MediaEngineCameraVideoSource::ChooseCapability(
-    const NormalizedConstraints &aConstraints,
-    const MediaEnginePrefs &aPrefs,
-    const nsString& aDeviceId,
-    webrtc::CaptureCapability& aCapability,
-    const DistanceCalculation aCalculate)
-{
-  if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
-    LOG(("ChooseCapability: prefs: %dx%d @%dfps",
-         aPrefs.GetWidth(), aPrefs.GetHeight(),
-         aPrefs.mFPS));
-    LogConstraints(aConstraints);
-    if (!aConstraints.mAdvanced.empty()) {
-      LOG(("Advanced array[%zu]:", aConstraints.mAdvanced.size()));
-      for (auto& advanced : aConstraints.mAdvanced) {
-        LogConstraints(advanced);
-      }
-    }
-  }
-
-  size_t num = NumCapabilities();
-
-  CapabilitySet candidateSet;
-  for (size_t i = 0; i < num; i++) {
-    candidateSet.AppendElement(i);
-  }
-
-  // First, filter capabilities by required constraints (min, max, exact).
-
-  for (size_t i = 0; i < candidateSet.Length();) {
-    auto& candidate = candidateSet[i];
-    webrtc::CaptureCapability cap;
-    GetCapability(candidate.mIndex, cap);
-    candidate.mDistance = GetDistance(cap, aConstraints, aDeviceId, aCalculate);
-    LogCapability("Capability", cap, candidate.mDistance);
-    if (candidate.mDistance == UINT32_MAX) {
-      candidateSet.RemoveElementAt(i);
-    } else {
-      ++i;
-    }
-  }
-
-  if (!candidateSet.Length()) {
-    LOG(("failed to find capability match from %zu choices",num));
-    return false;
-  }
-
-  // Filter further with all advanced constraints (that don't overconstrain).
-
-  for (const auto &cs : aConstraints.mAdvanced) {
-    CapabilitySet rejects;
-    for (size_t i = 0; i < candidateSet.Length();) {
-      auto& candidate = candidateSet[i];
-      webrtc::CaptureCapability cap;
-      GetCapability(candidate.mIndex, cap);
-      if (GetDistance(cap, cs, aDeviceId, aCalculate) == UINT32_MAX) {
-        rejects.AppendElement(candidate);
-        candidateSet.RemoveElementAt(i);
-      } else {
-        ++i;
-      }
-    }
-    if (!candidateSet.Length()) {
-      candidateSet.AppendElements(Move(rejects));
-    }
-  }
-  MOZ_ASSERT(candidateSet.Length(),
-             "advanced constraints filtering step can't reduce candidates to zero");
-
-  // Remaining algorithm is up to the UA.
-
-  TrimLessFitCandidates(candidateSet);
-
-  // Any remaining multiples all have the same distance. A common case of this
-  // occurs when no ideal is specified. Lean toward defaults.
-  uint32_t sameDistance = candidateSet[0].mDistance;
-  {
-    MediaTrackConstraintSet prefs;
-    prefs.mWidth.SetAsLong() = aPrefs.GetWidth();
-    prefs.mHeight.SetAsLong() = aPrefs.GetHeight();
-    prefs.mFrameRate.SetAsDouble() = aPrefs.mFPS;
-    NormalizedConstraintSet normPrefs(prefs, false);
-
-    for (auto& candidate : candidateSet) {
-      webrtc::CaptureCapability cap;
-      GetCapability(candidate.mIndex, cap);
-      candidate.mDistance = GetDistance(cap, normPrefs, aDeviceId, aCalculate);
-    }
-    TrimLessFitCandidates(candidateSet);
-  }
-
-  // Any remaining multiples all have the same distance, but may vary on
-  // format. Some formats are more desirable for certain use like WebRTC.
-  // E.g. I420 over RGB24 can remove a needless format conversion.
-
-  bool found = false;
-  for (auto& candidate : candidateSet) {
-    webrtc::CaptureCapability cap;
-    GetCapability(candidate.mIndex, cap);
-    if (cap.rawType == webrtc::RawVideoType::kVideoI420 ||
-        cap.rawType == webrtc::RawVideoType::kVideoYUY2 ||
-        cap.rawType == webrtc::RawVideoType::kVideoYV12) {
-      aCapability = cap;
-      found = true;
-      break;
-    }
-  }
-  if (!found) {
-    GetCapability(candidateSet[0].mIndex, aCapability);
-  }
-
-  LogCapability("Chosen capability", aCapability, sameDistance);
-  return true;
-}
-
-void
-MediaEngineCameraVideoSource::SetName(nsString aName)
-{
-  mDeviceName = aName;
-  bool hasFacingMode = false;
-  VideoFacingModeEnum facingMode = VideoFacingModeEnum::User;
-
-  // Set facing mode based on device name.
-#if defined(ANDROID)
-  // Names are generated. Example: "Camera 0, Facing back, Orientation 90"
-  //
-  // See media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/
-  // webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
-
-  if (aName.Find(NS_LITERAL_STRING("Facing back")) != kNotFound) {
-    hasFacingMode = true;
-    facingMode = VideoFacingModeEnum::Environment;
-  } else if (aName.Find(NS_LITERAL_STRING("Facing front")) != kNotFound) {
-    hasFacingMode = true;
-    facingMode = VideoFacingModeEnum::User;
-  }
-#endif // ANDROID
-#ifdef XP_MACOSX
-  // Kludge to test user-facing cameras on OSX.
-  if (aName.Find(NS_LITERAL_STRING("Face")) != -1) {
-    hasFacingMode = true;
-    facingMode = VideoFacingModeEnum::User;
-  }
-#endif
-#ifdef XP_WIN
-  // The cameras' name of Surface book are "Microsoft Camera Front" and
-  // "Microsoft Camera Rear" respectively.
-
-  if (aName.Find(NS_LITERAL_STRING("Front")) != kNotFound) {
-    hasFacingMode = true;
-    facingMode = VideoFacingModeEnum::User;
-  } else if (aName.Find(NS_LITERAL_STRING("Rear")) != kNotFound) {
-    hasFacingMode = true;
-    facingMode = VideoFacingModeEnum::Environment;
-  }
-#endif // WINDOWS
-  if (hasFacingMode) {
-    mFacingMode.Assign(NS_ConvertUTF8toUTF16(
-        VideoFacingModeEnumValues::strings[uint32_t(facingMode)].value));
-  } else {
-    mFacingMode.Truncate();
-  }
-}
-
-void
-MediaEngineCameraVideoSource::GetName(nsAString& aName) const
-{
-  aName = mDeviceName;
-}
-
-void
-MediaEngineCameraVideoSource::SetUUID(const char* aUUID)
-{
-  mUniqueId.Assign(aUUID);
-}
-
-void
-MediaEngineCameraVideoSource::GetUUID(nsACString& aUUID) const
-{
-  aUUID = mUniqueId;
-}
-
-const nsCString&
-MediaEngineCameraVideoSource::GetUUID() const
-{
-  return mUniqueId;
-}
-
-} // namespace mozilla
deleted file mode 100644
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MediaEngineCameraVideoSource_h
-#define MediaEngineCameraVideoSource_h
-
-#include "MediaEngine.h"
-
-#include "nsDirectoryServiceDefs.h"
-#include "mozilla/Unused.h"
-
-// conflicts with #include of scoped_ptr.h
-#undef FF
-// Avoid warnings about redefinition of WARN_UNUSED_RESULT
-#include "ipc/IPCMessageUtils.h"
-
-// WebRTC includes
-#include "webrtc/modules/video_capture/video_capture_defines.h"
-
-namespace webrtc {
-  using CaptureCapability = VideoCaptureCapability;
-}
-
-namespace mozilla {
-
-// Fitness distance is defined in
-// https://www.w3.org/TR/2017/CR-mediacapture-streams-20171003/#dfn-selectsettings
-// The main difference of feasibility and fitness distance is that if the
-// constraint is required ('max', or 'exact'), and the settings dictionary's value
-// for the constraint does not satisfy the constraint, the fitness distance is
-// positive infinity. Given a continuous space of settings dictionaries comprising
-// all discrete combinations of dimension and frame-rate related properties,
-// the feasibility distance is still in keeping with the constraints algorithm.
-enum DistanceCalculation {
-  kFitness,
-  kFeasibility
-};
-
-class MediaEngineCameraVideoSource : public MediaEngineVideoSource
-{
-public:
-  // Some subclasses use an index to track multiple instances.
-  explicit MediaEngineCameraVideoSource(int aIndex,
-                                        const char* aMonitorName = "Camera.Monitor")
-    : MediaEngineVideoSource(kReleased)
-    , mMonitor(aMonitorName)
-    , mWidth(0)
-    , mHeight(0)
-    , mInitDone(false)
-    , mCaptureIndex(aIndex)
-    , mTrackID(0)
-  {}
-
-  explicit MediaEngineCameraVideoSource(const char* aMonitorName = "Camera.Monitor")
-    : MediaEngineCameraVideoSource(0, aMonitorName) {}
-
-  void GetName(nsAString& aName) const override;
-  void GetUUID(nsACString& aUUID) const override;
-
-  bool IsFake() override
-  {
-    return false;
-  }
-
-  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-  {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  }
-
-  uint32_t GetBestFitnessDistance(
-      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-      const nsString& aDeviceId) const override;
-
-  void Shutdown() override
-  {
-    MonitorAutoLock lock(mMonitor);
-    // really Stop() *should* be called before it gets here
-    Unused << NS_WARN_IF(mImage);
-    mImage = nullptr;
-    mImageContainer = nullptr;
-  }
-
-protected:
-  struct CapabilityCandidate {
-    explicit CapabilityCandidate(uint8_t index, uint32_t distance = 0)
-    : mIndex(index), mDistance(distance) {}
-
-    size_t mIndex;
-    uint32_t mDistance;
-  };
-  typedef nsTArray<CapabilityCandidate> CapabilitySet;
-
-  ~MediaEngineCameraVideoSource() {}
-
-  // guts for appending data to the MSG track
-  virtual bool AppendToTrack(SourceMediaStream* aSource,
-                             layers::Image* aImage,
-                             TrackID aID,
-                             StreamTime delta,
-                             const PrincipalHandle& aPrincipalHandle);
-  uint32_t GetDistance(const webrtc::CaptureCapability& aCandidate,
-                       const NormalizedConstraintSet &aConstraints,
-                       const nsString& aDeviceId,
-                       const DistanceCalculation aCalculate) const;
-  uint32_t GetFitnessDistance(const webrtc::CaptureCapability& aCandidate,
-                              const NormalizedConstraintSet &aConstraints,
-                              const nsString& aDeviceId) const;
-  uint32_t GetFeasibilityDistance(const webrtc::CaptureCapability& aCandidate,
-                              const NormalizedConstraintSet &aConstraints,
-                              const nsString& aDeviceId) const;
-  static void TrimLessFitCandidates(CapabilitySet& set);
-  static void LogConstraints(const NormalizedConstraintSet& aConstraints);
-  static void LogCapability(const char* aHeader,
-                            const webrtc::CaptureCapability &aCapability,
-                            uint32_t aDistance);
-  virtual size_t NumCapabilities() const;
-  virtual void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) const;
-  virtual bool ChooseCapability(
-    const NormalizedConstraints &aConstraints,
-    const MediaEnginePrefs &aPrefs,
-    const nsString& aDeviceId,
-    webrtc::CaptureCapability& aCapability,
-    const DistanceCalculation aCalculate
-  );
-  void SetName(nsString aName);
-  void SetUUID(const char* aUUID);
-  const nsCString& GetUUID() const; // protected access
-
-  // Engine variables.
-
-  // mMonitor protects mImage access/changes, and transitions of mState
-  // from kStarted to kStopped (which are combined with EndTrack() and
-  // image changes).
-  // mMonitor also protects mSources[] and mPrincipalHandles[] access/changes.
-  // mSources[] and mPrincipalHandles[] are accessed from webrtc threads.
-
-  // All the mMonitor accesses are from the child classes.
-  Monitor mMonitor; // Monitor for processing Camera frames.
-  nsTArray<RefPtr<SourceMediaStream>> mSources; // When this goes empty, we shut down HW
-  nsTArray<PrincipalHandle> mPrincipalHandles; // Directly mapped to mSources.
-  RefPtr<layers::Image> mImage;
-  nsTArray<RefPtr<layers::Image>> mImages;
-  nsTArray<webrtc::CaptureCapability> mTargetCapabilities;
-  nsTArray<uint64_t> mHandleIds;
-  RefPtr<layers::ImageContainer> mImageContainer;
-  // end of data protected by mMonitor
-
-  int mWidth, mHeight;
-  bool mInitDone;
-  int mCaptureIndex;
-  TrackID mTrackID;
-
-  webrtc::CaptureCapability mCapability;
-  webrtc::CaptureCapability mTargetCapability;
-  uint64_t mHandleId;
-
-  mutable nsTArray<webrtc::CaptureCapability> mHardcodedCapabilities;
-private:
-  nsString mDeviceName;
-  nsCString mUniqueId;
-  nsString mFacingMode;
-};
-
-
-} // namespace mozilla
-
-#endif // MediaEngineCameraVideoSource_h
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -1,145 +1,140 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaEngineDefault.h"
 
-#include "nsCOMPtr.h"
-#include "mozilla/dom/File.h"
-#include "mozilla/UniquePtr.h"
-#include "nsIFile.h"
-#include "Layers.h"
 #include "ImageContainer.h"
 #include "ImageTypes.h"
-#include "nsContentUtils.h"
+#include "Layers.h"
 #include "MediaStreamGraph.h"
-
+#include "MediaTrackConstraints.h"
+#include "mozilla/dom/File.h"
+#include "mozilla/UniquePtr.h"
+#include "nsCOMPtr.h"
+#include "nsContentUtils.h"
+#include "nsIFile.h"
 #include "nsIFilePicker.h"
+#include "nsIPrefBranch.h"
 #include "nsIPrefService.h"
-#include "nsIPrefBranch.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "nsISupportsUtils.h"
 #endif
 
 #ifdef MOZ_WEBRTC
 #include "YuvStamper.h"
 #endif
 
 #define DEFAULT_AUDIO_TIMER_MS 10
 namespace mozilla {
 
 using namespace mozilla::gfx;
 
-NS_IMPL_ISUPPORTS(MediaEngineDefaultVideoSource, nsITimerCallback, nsINamed)
 /**
  * Default video source.
  */
 
 MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
-#ifdef MOZ_WEBRTC
-  : MediaEngineCameraVideoSource("FakeVideo.Monitor")
-#else
-  : MediaEngineVideoSource()
-#endif
-  , mTimer(nullptr)
-#ifndef MOZ_WEBRTC
-  , mMonitor("Fake video")
-#endif
-  , mCb(16), mCr(16)
-{
-  mImageContainer =
-    layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
-}
+  : mTimer(nullptr)
+  , mMutex("MediaEngineDefaultVideoSource::mMutex")
+{}
 
 MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
 {}
 
-void
-MediaEngineDefaultVideoSource::GetName(nsAString& aName) const
+nsString
+MediaEngineDefaultVideoSource::GetName() const
 {
-  aName.AssignLiteral(u"Default Video Device");
+  return NS_LITERAL_STRING(u"Default Video Device");
 }
 
-void
-MediaEngineDefaultVideoSource::GetUUID(nsACString& aUUID) const
+nsCString
+MediaEngineDefaultVideoSource::GetUUID() const
 {
-  aUUID.AssignLiteral("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676");
+  return NS_LITERAL_CSTRING("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676");
 }
 
 uint32_t
 MediaEngineDefaultVideoSource::GetBestFitnessDistance(
     const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     const nsString& aDeviceId) const
 {
+  AssertIsOnOwningThread();
+
   uint32_t distance = 0;
 #ifdef MOZ_WEBRTC
   for (const auto* cs : aConstraintSets) {
-    distance = GetMinimumFitnessDistance(*cs, aDeviceId);
+    distance = MediaConstraintsHelper::GetMinimumFitnessDistance(*cs, aDeviceId);
     break; // distance is read from first entry only
   }
 #endif
   return distance;
 }
 
 nsresult
 MediaEngineDefaultVideoSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                         const MediaEnginePrefs &aPrefs,
                                         const nsString& aDeviceId,
                                         const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                                         AllocationHandle** aOutHandle,
                                         const char** aOutBadConstraint)
 {
-  if (mState != kReleased) {
-    return NS_ERROR_FAILURE;
-  }
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kReleased);
 
   FlattenedConstraints c(aConstraints);
 
   // Mock failure for automated tests.
   if (c.mDeviceId.mIdeal.find(NS_LITERAL_STRING("bad device")) !=
       c.mDeviceId.mIdeal.end()) {
     return NS_ERROR_FAILURE;
   }
 
 
   // emulator debug is very, very slow; reduce load on it with smaller/slower fake video
   mOpts = aPrefs;
   mOpts.mWidth = c.mWidth.Get(aPrefs.mWidth ? aPrefs.mWidth :
 #ifdef DEBUG
-                              MediaEngine::DEFAULT_43_VIDEO_WIDTH/2
+                              MediaEnginePrefs::DEFAULT_43_VIDEO_WIDTH/2
 #else
-                              MediaEngine::DEFAULT_43_VIDEO_WIDTH
+                              MediaEnginePrefs::DEFAULT_43_VIDEO_WIDTH
 #endif
                               );
   mOpts.mHeight = c.mHeight.Get(aPrefs.mHeight ? aPrefs.mHeight :
 #ifdef DEBUG
-                                MediaEngine::DEFAULT_43_VIDEO_HEIGHT/2
+                                MediaEnginePrefs::DEFAULT_43_VIDEO_HEIGHT/2
 #else
-                                MediaEngine::DEFAULT_43_VIDEO_HEIGHT
+                                MediaEnginePrefs::DEFAULT_43_VIDEO_HEIGHT
 #endif
                                 );
   mOpts.mWidth = std::max(160, std::min(mOpts.mWidth, 4096)) & ~1;
   mOpts.mHeight = std::max(90, std::min(mOpts.mHeight, 2160)) & ~1;
+  *aOutHandle = nullptr;
+
+  MutexAutoLock lock(mMutex);
   mState = kAllocated;
-  *aOutHandle = nullptr;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultVideoSource::Deallocate(AllocationHandle* aHandle)
+MediaEngineDefaultVideoSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
 {
+  AssertIsOnOwningThread();
+
   MOZ_ASSERT(!aHandle);
-  if (mState != kStopped && mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
+  MOZ_ASSERT(!mImage);
+  MOZ_ASSERT(mState == kStopped || mState == kAllocated);
+
+  MutexAutoLock lock(mMutex);
   mState = kReleased;
-  mImage = nullptr;
+  mImageContainer = nullptr;
   return NS_OK;
 }
 
 static void AllocateSolidColorFrame(layers::PlanarYCbCrData& aData,
                                     int aWidth, int aHeight,
                                     int aY, int aCb, int aCr)
 {
   MOZ_ASSERT(!(aWidth&1));
@@ -167,79 +162,100 @@ static void AllocateSolidColorFrame(laye
 }
 
 static void ReleaseFrame(layers::PlanarYCbCrData& aData)
 {
   free(aData.mYChannel);
 }
 
 nsresult
-MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
+MediaEngineDefaultVideoSource::Start(SourceMediaStream* aStream,
+                                     TrackID aTrackID,
                                      const PrincipalHandle& aPrincipalHandle)
 {
-  if (mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kAllocated, "Allocate() must happen before Start()");
 
   mTimer = NS_NewTimer();
   if (!mTimer) {
     return NS_ERROR_FAILURE;
   }
 
-  aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
-
-  // Remember TrackID so we can end it later
-  mTrackID = aID;
+  if (!mImageContainer) {
+    mImageContainer =
+      layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
+  }
 
   // Start timer for subsequent frames
+  uint32_t interval;
 #if defined(MOZ_WIDGET_ANDROID) && defined(DEBUG)
 // emulator debug is very, very slow and has problems dealing with realtime audio inputs
-  mTimer->InitWithCallback(this, (1000 / mOpts.mFPS)*10, nsITimer::TYPE_REPEATING_SLACK);
+  interval = 10 * (1000 / mOpts.mFPS);
 #else
-  mTimer->InitWithCallback(this, 1000 / mOpts.mFPS, nsITimer::TYPE_REPEATING_SLACK);
+  interval = 1000 / mOpts.mFPS;
 #endif
+  mTimer->InitWithNamedFuncCallback([](nsITimer* aTimer, void* aClosure) {
+      RefPtr<MediaEngineDefaultVideoSource> source =
+        static_cast<MediaEngineDefaultVideoSource*>(aClosure);
+      source->GenerateFrame();
+    }, this, interval, nsITimer::TYPE_REPEATING_SLACK,
+    "MediaEngineDefaultVideoSource::GenerateFrame");
+
+  aStream->AddTrack(aTrackID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
+
+  MutexAutoLock lock(mMutex);
+  // Remember Stream and TrackID so we can end it later
+  mStream = aStream;
+  mTrackID = aTrackID;
+
   mState = kStarted;
-
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
+MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aStream, TrackID aTrackID)
 {
-  if (mState != kStarted) {
-    return NS_ERROR_FAILURE;
-  }
-  if (!mTimer) {
-    return NS_ERROR_FAILURE;
-  }
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kStarted);
+  MOZ_ASSERT(mTimer);
 
   mTimer->Cancel();
   mTimer = nullptr;
+  aStream->EndTrack(aTrackID);
 
-  aSource->EndTrack(aID);
+  MutexAutoLock lock(mMutex);
+  MOZ_ASSERT(mStream == aStream);
+  MOZ_ASSERT(mTrackID == aTrackID);
+
+  mStream = nullptr;
+  mTrackID = TRACK_NONE;
+  mImage = nullptr;
 
   mState = kStopped;
-  mImage = nullptr;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultVideoSource::Restart(
-    AllocationHandle* aHandle,
+MediaEngineDefaultVideoSource::Reconfigure(
+    const RefPtr<AllocationHandle>& aHandle,
     const dom::MediaTrackConstraints& aConstraints,
     const MediaEnginePrefs &aPrefs,
     const nsString& aDeviceId,
     const char** aOutBadConstraint)
 {
   return NS_OK;
 }
 
-NS_IMETHODIMP
-MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
+void
+MediaEngineDefaultVideoSource::GenerateFrame()
 {
+  AssertIsOnOwningThread();
+
   // Update the target color
   if (mCr <= 16) {
     if (mCb < 240) {
       mCb++;
     } else {
       mCr++;
     }
   } else if (mCb >= 240) {
@@ -273,183 +289,186 @@ MediaEngineDefaultVideoSource::Notify(ns
 
   bool setData = ycbcr_image->CopyData(data);
   MOZ_ASSERT(setData);
 
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
 
   if (!setData) {
-    return NS_ERROR_FAILURE;
+    return;
   }
 
-  MonitorAutoLock lock(mMonitor);
-
-  // implicitly releases last image
-  mImage = ycbcr_image.forget();
-
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-MediaEngineDefaultVideoSource::GetName(nsACString& aName)
-{
-  aName.AssignLiteral("MediaEngineDefaultVideoSource");
-  return NS_OK;
+  MutexAutoLock lock(mMutex);
+  mImage = Move(ycbcr_image);
 }
 
 void
-MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
-                                          SourceMediaStream *aSource,
-                                          TrackID aID,
-                                          StreamTime aDesiredTime,
-                                          const PrincipalHandle& aPrincipalHandle)
+MediaEngineDefaultVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                    const RefPtr<SourceMediaStream>& aStream,
+                                    TrackID aTrackID,
+                                    StreamTime aDesiredTime,
+                                    const PrincipalHandle& aPrincipalHandle)
 {
-  // AddTrack takes ownership of segment
+  // AppendFrame() takes ownership of the image; `segment` is consumed by AppendToTrack below
   VideoSegment segment;
-  MonitorAutoLock lock(mMonitor);
-  if (mState != kStarted) {
-    return;
+
+  RefPtr<layers::Image> image;
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState != kStarted) {
+      return;
+    }
+
+    MOZ_ASSERT(mStream == aStream);
+    MOZ_ASSERT(mTrackID == aTrackID);
+    image = mImage;
   }
 
-  // Note: we're not giving up mImage here
-  RefPtr<layers::Image> image = mImage;
-  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
-
+  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
   if (delta > 0) {
     // nullptr images are allowed
     IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
     segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
     // This can fail if either a) we haven't added the track yet, or b)
     // we've removed or finished the track.
-    aSource->AppendToTrack(aID, &segment);
+    aStream->AppendToTrack(aTrackID, &segment);
   }
 }
 
 /**
  * Default audio source.
  */
 
-NS_IMPL_ISUPPORTS0(MediaEngineDefaultAudioSource)
-
 MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
-  : MediaEngineAudioSource(kReleased)
-  , mLastNotify(0)
-  , mFreq(1000)
+  : mMutex("MediaEngineDefaultAudioSource::mMutex")
 {}
 
 MediaEngineDefaultAudioSource::~MediaEngineDefaultAudioSource()
 {}
 
-void
-MediaEngineDefaultAudioSource::GetName(nsAString& aName) const
+nsString
+MediaEngineDefaultAudioSource::GetName() const
 {
-  aName.AssignLiteral(u"Default Audio Device");
+  return NS_LITERAL_STRING(u"Default Audio Device");
 }
 
-void
-MediaEngineDefaultAudioSource::GetUUID(nsACString& aUUID) const
+nsCString
+MediaEngineDefaultAudioSource::GetUUID() const
 {
-  aUUID.AssignLiteral("B7CBD7C1-53EF-42F9-8353-73F61C70C092");
+  return NS_LITERAL_CSTRING("B7CBD7C1-53EF-42F9-8353-73F61C70C092");
 }
 
 uint32_t
 MediaEngineDefaultAudioSource::GetBestFitnessDistance(
     const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     const nsString& aDeviceId) const
 {
   uint32_t distance = 0;
 #ifdef MOZ_WEBRTC
   for (const auto* cs : aConstraintSets) {
-    distance = GetMinimumFitnessDistance(*cs, aDeviceId);
+    distance = MediaConstraintsHelper::GetMinimumFitnessDistance(*cs, aDeviceId);
     break; // distance is read from first entry only
   }
 #endif
   return distance;
 }
 
 nsresult
 MediaEngineDefaultAudioSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                         const MediaEnginePrefs &aPrefs,
                                         const nsString& aDeviceId,
                                         const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                                         AllocationHandle** aOutHandle,
                                         const char** aOutBadConstraint)
 {
-  if (mState != kReleased) {
-    return NS_ERROR_FAILURE;
-  }
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kReleased);
 
   // Mock failure for automated tests.
   if (aConstraints.mDeviceId.IsString() &&
       aConstraints.mDeviceId.GetAsString().EqualsASCII("bad device")) {
     return NS_ERROR_FAILURE;
   }
 
   mFreq = aPrefs.mFreq ? aPrefs.mFreq : 1000;
+  *aOutHandle = nullptr;
+
+  MutexAutoLock lock(mMutex);
   mState = kAllocated;
-  *aOutHandle = nullptr;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultAudioSource::Deallocate(AllocationHandle* aHandle)
+MediaEngineDefaultAudioSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
 {
+  AssertIsOnOwningThread();
+
   MOZ_ASSERT(!aHandle);
-  if (mState != kStopped && mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
+  MOZ_ASSERT(mState == kStopped || mState == kAllocated);
+
+  MutexAutoLock lock(mMutex);
   mState = kReleased;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream, TrackID aID,
+MediaEngineDefaultAudioSource::Start(SourceMediaStream* aStream,
+                                     TrackID aTrackID,
                                      const PrincipalHandle& aPrincipalHandle)
 {
-  if (mState != kAllocated) {
-    return NS_ERROR_FAILURE;
-  }
+
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kAllocated);
 
+  // AddAudioTrack will take ownership of segment
+  mStream = aStream;
+  mTrackID = aTrackID;
+  aStream->AddAudioTrack(aTrackID,
+                         aStream->GraphRate(),
+                         0,
+                         new AudioSegment(),
+                         SourceMediaStream::ADDTRACK_QUEUED);
 
   if (!mSineGenerator) {
     // generate sine wave (default 1KHz)
     mSineGenerator = new SineWaveGenerator(aStream->GraphRate(), mFreq);
   }
 
-  // AddTrack will take ownership of segment
-  AudioSegment* segment = new AudioSegment();
-  aStream->AddAudioTrack(aID, aStream->GraphRate(), 0, segment, SourceMediaStream::ADDTRACK_QUEUED);
+  mLastNotify = 0;
 
-  // Remember TrackID so we can finish later
-  mTrackID = aID;
-
-  mLastNotify = 0;
+  MutexAutoLock lock(mMutex);
   mState = kStarted;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
+MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aStream,
+                                    TrackID aTrackID)
 {
-  if (mState != kStarted) {
-    return NS_ERROR_FAILURE;
-  }
-  aSource->EndTrack(aID);
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kStarted);
 
+  aStream->EndTrack(aTrackID);
+
+  MutexAutoLock lock(mMutex);
   mState = kStopped;
   return NS_OK;
 }
 
 nsresult
-MediaEngineDefaultAudioSource::Restart(AllocationHandle* aHandle,
-                                       const dom::MediaTrackConstraints& aConstraints,
-                                       const MediaEnginePrefs &aPrefs,
-                                       const nsString& aDeviceId,
-                                       const char** aOutBadConstraint)
+MediaEngineDefaultAudioSource::Reconfigure(
+    const RefPtr<AllocationHandle>& aHandle,
+    const dom::MediaTrackConstraints& aConstraints,
+    const MediaEnginePrefs &aPrefs,
+    const nsString& aDeviceId,
+    const char** aOutBadConstraint)
 {
   return NS_OK;
 }
 
 void
 MediaEngineDefaultAudioSource::AppendToSegment(AudioSegment& aSegment,
                                                TrackTicks aSamples,
                                                const PrincipalHandle& aPrincipalHandle)
@@ -459,67 +478,79 @@ MediaEngineDefaultAudioSource::AppendToS
 
   mSineGenerator->generate(dest, aSamples);
   AutoTArray<const int16_t*,1> channels;
   channels.AppendElement(dest);
   aSegment.AppendFrames(buffer.forget(), channels, aSamples, aPrincipalHandle);
 }
 
 void
-MediaEngineDefaultAudioSource::NotifyPull(MediaStreamGraph* aGraph,
-                                          SourceMediaStream *aSource,
-                                          TrackID aID,
-                                          StreamTime aDesiredTime,
-                                          const PrincipalHandle& aPrincipalHandle)
+MediaEngineDefaultAudioSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                    const RefPtr<SourceMediaStream>& aStream,
+                                    TrackID aTrackID,
+                                    StreamTime aDesiredTime,
+                                    const PrincipalHandle& aPrincipalHandle)
 {
-  MOZ_ASSERT(aID == mTrackID);
   AudioSegment segment;
   // avoid accumulating rounding errors
-  TrackTicks desired = aSource->TimeToTicksRoundUp(aGraph->GraphRate(), aDesiredTime);
+  TrackTicks desired = aStream->TimeToTicksRoundUp(aStream->GraphRate(), aDesiredTime);
   TrackTicks delta = desired - mLastNotify;
   mLastNotify += delta;
   AppendToSegment(segment, delta, aPrincipalHandle);
-  aSource->AppendToTrack(mTrackID, &segment);
+  aStream->AppendToTrack(aTrackID, &segment);
 }
 
 void
-MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
-                                          nsTArray<RefPtr<MediaEngineVideoSource> >* aVSources) {
-  MutexAutoLock lock(mMutex);
+MediaEngineDefault::EnumerateDevices(dom::MediaSourceEnum aMediaSource,
+                                     nsTArray<RefPtr<MediaEngineSource>>* aSources)
+{
+  AssertIsOnOwningThread();
+
+  switch (aMediaSource) {
+    case dom::MediaSourceEnum::Camera: {
+      // Only supports camera video sources. See Bug 1038241.
+
+      // We once had code here to find a VideoSource with the same settings and
+      // re-use that. This is no longer possible since the resolution gets set
+      // in Allocate().
 
-  // only supports camera sources (for now).  See Bug 1038241
-  if (aMediaSource != dom::MediaSourceEnum::Camera) {
-    return;
+      auto newSource = MakeRefPtr<MediaEngineDefaultVideoSource>();
+      mVSources.AppendElement(newSource);
+      aSources->AppendElement(newSource);
+      return;
+    }
+    case dom::MediaSourceEnum::Microphone: {
+      for (const RefPtr<MediaEngineDefaultAudioSource>& source : mASources) {
+        if (source->IsAvailable()) {
+          aSources->AppendElement(source);
+        }
+      }
+
+      if (aSources->IsEmpty()) {
+        // All streams are currently busy, just make a new one.
+        auto newSource = MakeRefPtr<MediaEngineDefaultAudioSource>();
+        mASources.AppendElement(newSource);
+        aSources->AppendElement(newSource);
+      }
+      return;
+    }
+    default:
+      MOZ_ASSERT_UNREACHABLE("Unsupported source type");
+      return;
   }
-
-  // We once had code here to find a VideoSource with the same settings and re-use that.
-  // This no longer is possible since the resolution is being set in Allocate().
-
-  RefPtr<MediaEngineVideoSource> newSource = new MediaEngineDefaultVideoSource();
-  mVSources.AppendElement(newSource);
-  aVSources->AppendElement(newSource);
 }
 
 void
-MediaEngineDefault::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
-                                          nsTArray<RefPtr<MediaEngineAudioSource> >* aASources) {
-  MutexAutoLock lock(mMutex);
-  int32_t len = mASources.Length();
-
-  // aMediaSource is ignored for audio devices (for now).
+MediaEngineDefault::Shutdown()
+{
+  AssertIsOnOwningThread();
 
-  for (int32_t i = 0; i < len; i++) {
-    RefPtr<MediaEngineAudioSource> source = mASources.ElementAt(i);
-    if (source->IsAvailable()) {
-      aASources->AppendElement(source);
-    }
+  for (RefPtr<MediaEngineDefaultVideoSource>& source : mVSources) {
+    source->Shutdown();
   }
-
-  // All streams are currently busy, just make a new one.
-  if (aASources->Length() == 0) {
-    RefPtr<MediaEngineAudioSource> newSource =
-      new MediaEngineDefaultAudioSource();
-    mASources.AppendElement(newSource);
-    aASources->AppendElement(newSource);
+  for (RefPtr<MediaEngineDefaultAudioSource>& source : mASources) {
+    source->Shutdown();
   }
-}
+  mVSources.Clear();
+  mASources.Clear();
+};
 
 } // namespace mozilla
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -7,226 +7,198 @@
 
 #include "nsINamed.h"
 #include "nsITimer.h"
 
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
 #include "DOMMediaStream.h"
 #include "nsComponentManagerUtils.h"
-#include "mozilla/Monitor.h"
+#include "mozilla/Mutex.h"
 
 #include "VideoUtils.h"
 #include "MediaEngine.h"
+#include "MediaEnginePrefs.h"
 #include "VideoSegment.h"
 #include "AudioSegment.h"
 #include "StreamTracks.h"
-#ifdef MOZ_WEBRTC
-#include "MediaEngineCameraVideoSource.h"
-#endif
+#include "MediaEngineSource.h"
 #include "MediaStreamGraph.h"
-#include "MediaTrackConstraints.h"
 #include "SineWaveGenerator.h"
 
 namespace mozilla {
 
 namespace layers {
 class ImageContainer;
 } // namespace layers
 
 class MediaEngineDefault;
 
 /**
  * The default implementation of the MediaEngine interface.
  */
-class MediaEngineDefaultVideoSource : public nsITimerCallback,
-                                      public nsINamed,
-#ifdef MOZ_WEBRTC
-                                      public MediaEngineCameraVideoSource
-#else
-                                      public MediaEngineVideoSource
-#endif
+class MediaEngineDefaultVideoSource : public MediaEngineSource
 {
 public:
   MediaEngineDefaultVideoSource();
 
-  void GetName(nsAString&) const override;
-  void GetUUID(nsACString&) const override;
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return mState == kReleased;
+  }
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
 
   nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
                     const MediaEnginePrefs &aPrefs,
                     const nsString& aDeviceId,
-                    const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
-  nsresult Deallocate(AllocationHandle* aHandle) override;
   nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
   nsresult Stop(SourceMediaStream*, TrackID) override;
-  nsresult Restart(AllocationHandle* aHandle,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const nsString& aDeviceId,
-                   const char** aOutBadConstraint) override;
-  void NotifyPull(MediaStreamGraph* aGraph,
-                  SourceMediaStream *aSource,
-                  TrackID aId,
-                  StreamTime aDesiredTime,
-                  const PrincipalHandle& aPrincipalHandle) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
+
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       const nsString& aDeviceId) const override;
 
-  bool IsFake() override {
+  bool IsFake() const override
+  {
     return true;
   }
 
-  dom::MediaSourceEnum GetMediaSource() const override {
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
     return dom::MediaSourceEnum::Camera;
   }
 
-  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-  {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  }
-
-  void Shutdown() override {
-    Stop(mSource, mTrackID);
-    MonitorAutoLock lock(mMonitor);
-    mImageContainer = nullptr;
-  }
-
-  NS_DECL_THREADSAFE_ISUPPORTS
-  NS_DECL_NSITIMERCALLBACK
-  NS_DECL_NSINAMED
-
 protected:
   ~MediaEngineDefaultVideoSource();
 
-  friend class MediaEngineDefault;
+  /**
+   * Called by mTimer when it's time to generate a new frame.
+   */
+  void GenerateFrame();
 
-  RefPtr<SourceMediaStream> mSource;
-  TrackID mTrackID;
   nsCOMPtr<nsITimer> mTimer;
 
-#ifndef MOZ_WEBRTC
-  // mMonitor protects mImage/mImageContainer access/changes, and
-  // transitions of mState from kStarted to kStopped (which are combined
-  // with EndTrack() and image changes).
-  Monitor mMonitor;
+  RefPtr<layers::ImageContainer> mImageContainer;
+
+  // mMutex protects mState, mImage, mStream, mTrackID
+  Mutex mMutex;
+
+  // Current state of this source.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  MediaEngineSourceState mState = kReleased;
   RefPtr<layers::Image> mImage;
-  RefPtr<layers::ImageContainer> mImageContainer;
-#endif
+  RefPtr<SourceMediaStream> mStream;
+  TrackID mTrackID = TRACK_NONE;
 
   MediaEnginePrefs mOpts;
-  int mCb;
-  int mCr;
+  int mCb = 16;
+  int mCr = 16;
 };
 
 class SineWaveGenerator;
 
-class MediaEngineDefaultAudioSource : public MediaEngineAudioSource
+class MediaEngineDefaultAudioSource : public MediaEngineSource
 {
 public:
   MediaEngineDefaultAudioSource();
 
-  void GetName(nsAString&) const override;
-  void GetUUID(nsACString&) const override;
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return mState == kReleased;
+  }
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
 
   nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
                     const MediaEnginePrefs &aPrefs,
                     const nsString& aDeviceId,
-                    const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
-  nsresult Deallocate(AllocationHandle* aHandle) override;
   nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
   nsresult Stop(SourceMediaStream*, TrackID) override;
-  nsresult Restart(AllocationHandle* aHandle,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const nsString& aDeviceId,
-                   const char** aOutBadConstraint) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
   void inline AppendToSegment(AudioSegment& aSegment,
                               TrackTicks aSamples,
                               const PrincipalHandle& aPrincipalHandle);
-  void NotifyPull(MediaStreamGraph* aGraph,
-                  SourceMediaStream *aSource,
-                  TrackID aId,
-                  StreamTime aDesiredTime,
-                  const PrincipalHandle& aPrincipalHandle) override;
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
 
-  void NotifyOutputData(MediaStreamGraph* aGraph,
-                        AudioDataValue* aBuffer, size_t aFrames,
-                        TrackRate aRate, uint32_t aChannels) override
-  {}
-  void NotifyInputData(MediaStreamGraph* aGraph,
-                       const AudioDataValue* aBuffer, size_t aFrames,
-                       TrackRate aRate, uint32_t aChannels) override
-  {}
-  void DeviceChanged() override
-  {}
-  bool IsFake() override {
+  bool IsFake() const override
+  {
     return true;
   }
 
-  dom::MediaSourceEnum GetMediaSource() const override {
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
     return dom::MediaSourceEnum::Microphone;
   }
 
-  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-  {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  }
-
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       const nsString& aDeviceId) const override;
 
-  NS_DECL_THREADSAFE_ISUPPORTS
 
 protected:
   ~MediaEngineDefaultAudioSource();
 
-  TrackID mTrackID;
+  // mMutex protects mState, mStream, mTrackID
+  Mutex mMutex;
 
-  TrackTicks mLastNotify; // Accessed in ::Start(), then on NotifyPull (from MSG thread)
-  uint32_t mFreq; // ditto
+  // Current state of this source.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  MediaEngineSourceState mState = kReleased;
+  RefPtr<SourceMediaStream> mStream;
+  TrackID mTrackID = TRACK_NONE;
 
-  // Created on Start, then accessed from NotifyPull (MSG thread)
+  // Accessed in ::Start(), then on Pull (from MSG thread)
+  TrackTicks mLastNotify = 0;
+  uint32_t mFreq = 1000; // ditto
+
+  // Created on Start, then accessed from Pull (MSG thread)
   nsAutoPtr<SineWaveGenerator> mSineGenerator;
 };
 
 
 class MediaEngineDefault : public MediaEngine
 {
-  typedef MediaEngine Super;
 public:
-  explicit MediaEngineDefault() : mMutex("mozilla::MediaEngineDefault") {}
+  MediaEngineDefault() = default;
 
-  void EnumerateVideoDevices(dom::MediaSourceEnum,
-                             nsTArray<RefPtr<MediaEngineVideoSource> >*) override;
-  void EnumerateAudioDevices(dom::MediaSourceEnum,
-                             nsTArray<RefPtr<MediaEngineAudioSource> >*) override;
-  void Shutdown() override {
-    MutexAutoLock lock(mMutex);
-
-    for (auto& source : mVSources) {
-      source->Shutdown();
-    }
-    for (auto& source : mASources) {
-      source->Shutdown();
-    }
-    mVSources.Clear();
-    mASources.Clear();
-  };
+  void EnumerateDevices(dom::MediaSourceEnum,
+                        nsTArray<RefPtr<MediaEngineSource>>*) override;
+  void Shutdown() override;
 
 private:
-  ~MediaEngineDefault() {}
+  ~MediaEngineDefault() = default;
 
-  Mutex mMutex;
-  // protected with mMutex:
-  nsTArray<RefPtr<MediaEngineVideoSource> > mVSources;
-  nsTArray<RefPtr<MediaEngineAudioSource> > mASources;
+  nsTArray<RefPtr<MediaEngineDefaultVideoSource>> mVSources;
+  nsTArray<RefPtr<MediaEngineDefaultAudioSource>> mASources;
 };
 
 } // namespace mozilla
 
 #endif /* NSMEDIAENGINEDEFAULT_H_ */
copy from dom/media/webrtc/MediaEngine.h
copy to dom/media/webrtc/MediaEnginePrefs.h
--- a/dom/media/webrtc/MediaEngine.h
+++ b/dom/media/webrtc/MediaEnginePrefs.h
@@ -1,86 +1,30 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MEDIAENGINE_H_
-#define MEDIAENGINE_H_
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "mozilla/RefPtr.h"
-#include "DOMMediaStream.h"
-#include "MediaStreamGraph.h"
-#include "MediaTrackConstraints.h"
-#include "mozilla/dom/MediaStreamTrackBinding.h"
-#include "mozilla/dom/VideoStreamTrack.h"
-#include "mozilla/ipc/PBackgroundSharedTypes.h"
-#include "mozilla/media/DeviceChangeCallback.h"
+#ifndef MediaEnginePrefs_h
+#define MediaEnginePrefs_h
 
 namespace mozilla {
 
-namespace dom {
-class Blob;
-} // namespace dom
-
-enum {
-  kVideoTrack = 1,
-  kAudioTrack = 2,
-  kTrackCount
-};
-
 /**
- * Abstract interface for managing audio and video devices. Each platform
- * must implement a concrete class that will map these classes and methods
- * to the appropriate backend. For example, on Desktop platforms, these will
- * correspond to equivalent webrtc (GIPS) calls.
+ * Preferences for media engine sources (video resolution/fps, audio processing).
  */
-class MediaEngineVideoSource;
-class MediaEngineAudioSource;
-
-enum MediaEngineState {
-  kAllocated,
-  kStarted,
-  kStopped,
-  kReleased
-};
-
-class MediaEngine : public DeviceChangeCallback
-{
+class MediaEnginePrefs {
 public:
-  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEngine)
-
   static const int DEFAULT_VIDEO_FPS = 30;
   static const int DEFAULT_43_VIDEO_WIDTH = 640;
   static const int DEFAULT_43_VIDEO_HEIGHT = 480;
   static const int DEFAULT_169_VIDEO_WIDTH = 1280;
   static const int DEFAULT_169_VIDEO_HEIGHT = 720;
 
-  /* Populate an array of video sources in the nsTArray. Also include devices
-   * that are currently unavailable. */
-  virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
-                                     nsTArray<RefPtr<MediaEngineVideoSource> >*) = 0;
-
-  /* Populate an array of audio sources in the nsTArray. Also include devices
-   * that are currently unavailable. */
-  virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
-                                     nsTArray<RefPtr<MediaEngineAudioSource> >*) = 0;
-
-  virtual void Shutdown() = 0;
-
-  virtual void SetFakeDeviceChangeEvents() {}
-
-protected:
-  virtual ~MediaEngine() {}
-};
-
-/**
- * Video source and friends.
- */
-class MediaEnginePrefs {
-public:
   MediaEnginePrefs()
     : mWidth(0)
     , mHeight(0)
     , mFPS(0)
     , mFreq(0)
     , mAecOn(false)
     , mAgcOn(false)
     , mNoiseOn(false)
@@ -118,364 +62,34 @@ public:
                              GetDefWidth(aHD));
   }
 
   int32_t GetHeight(bool aHD = false) const {
     return mHeight? mHeight : (mWidth?
                                (mWidth * GetDefHeight(aHD)) / GetDefWidth(aHD) :
                                GetDefHeight(aHD));
   }
+
 private:
   static int32_t GetDefWidth(bool aHD = false) {
     // It'd be nice if we could use the ternary operator here, but we can't
     // because of bug 1002729.
     if (aHD) {
-      return MediaEngine::DEFAULT_169_VIDEO_WIDTH;
+      return DEFAULT_169_VIDEO_WIDTH;
     }
 
-    return MediaEngine::DEFAULT_43_VIDEO_WIDTH;
+    return DEFAULT_43_VIDEO_WIDTH;
   }
 
   static int32_t GetDefHeight(bool aHD = false) {
     // It'd be nice if we could use the ternary operator here, but we can't
     // because of bug 1002729.
     if (aHD) {
-      return MediaEngine::DEFAULT_169_VIDEO_HEIGHT;
-    }
-
-    return MediaEngine::DEFAULT_43_VIDEO_HEIGHT;
-  }
-};
-
-/**
- * Callback interface for TakePhoto(). Either PhotoComplete() or PhotoError()
- * should be called.
- */
-class MediaEnginePhotoCallback {
-public:
-  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEnginePhotoCallback)
-
-  // aBlob is the image captured by MediaEngineSource. It is
-  // called on main thread.
-  virtual nsresult PhotoComplete(already_AddRefed<dom::Blob> aBlob) = 0;
-
-  // It is called on main thread. aRv is the error code.
-  virtual nsresult PhotoError(nsresult aRv) = 0;
-
-protected:
-  virtual ~MediaEnginePhotoCallback() {}
-};
-
-/**
- * Common abstract base class for audio and video sources.
- *
- * By default, the base class implements Allocate and Deallocate using its
- * UpdateSingleSource pattern, which manages allocation handles and calculates
- * net constraints from competing allocations and updates a single shared device.
- *
- * Classes that don't operate as a single shared device can override Allocate
- * and Deallocate and simply not pass the methods up.
- */
-class MediaEngineSource : public nsISupports,
-                          protected MediaConstraintsHelper
-{
-public:
-  // code inside webrtc.org assumes these sizes; don't use anything smaller
-  // without verifying it's ok
-  static const unsigned int kMaxDeviceNameLength = 128;
-  static const unsigned int kMaxUniqueIdLength = 256;
-
-  virtual ~MediaEngineSource()
-  {
-    if (!mInShutdown) {
-      Shutdown();
-    }
-  }
-
-  virtual void Shutdown()
-  {
-    mInShutdown = true;
-  };
-
-  /* Populate the human readable name of this device in the nsAString */
-  virtual void GetName(nsAString&) const = 0;
-
-  /* Populate the UUID of this device in the nsACString */
-  virtual void GetUUID(nsACString&) const = 0;
-
-  /* Override w/true if source does end-run around cross origin restrictions. */
-  virtual bool GetScary() const { return false; };
-
-  class AllocationHandle
-  {
-  public:
-    NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AllocationHandle);
-  protected:
-    ~AllocationHandle() {}
-    static uint64_t sId;
-  public:
-    AllocationHandle(const dom::MediaTrackConstraints& aConstraints,
-                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId)
-
-    : mConstraints(aConstraints),
-      mPrincipalInfo(aPrincipalInfo),
-      mPrefs(aPrefs),
-#ifdef MOZ_WEBRTC
-      mId(sId++),
-#endif
-      mDeviceId(aDeviceId) {}
-  public:
-    NormalizedConstraints mConstraints;
-    mozilla::ipc::PrincipalInfo mPrincipalInfo;
-    MediaEnginePrefs mPrefs;
-    uint64_t mId;
-    nsString mDeviceId;
-  };
-
-  /* Release the device back to the system. */
-  virtual nsresult Deallocate(AllocationHandle* aHandle)
-  {
-    MOZ_ASSERT(aHandle);
-    RefPtr<AllocationHandle> handle = aHandle;
-
-    class Comparator {
-    public:
-      static bool Equals(const RefPtr<AllocationHandle>& a,
-                         const RefPtr<AllocationHandle>& b) {
-        return a.get() == b.get();
-      }
-    };
-
-    auto ix = mRegisteredHandles.IndexOf(handle, 0, Comparator());
-    if (ix == mRegisteredHandles.NoIndex) {
-      MOZ_ASSERT(false);
-      return NS_ERROR_FAILURE;
+      return DEFAULT_169_VIDEO_HEIGHT;
     }
 
-    mRegisteredHandles.RemoveElementAt(ix);
-    if (mRegisteredHandles.Length() && !mInShutdown) {
-      // Whenever constraints are removed, other parties may get closer to ideal.
-      auto& first = mRegisteredHandles[0];
-      const char* badConstraint = nullptr;
-      return ReevaluateAllocation(nullptr, nullptr, first->mPrefs,
-                                  first->mDeviceId, &badConstraint);
-    }
-    return NS_OK;
-  }
-
-  /* Start the device and add the track to the provided SourceMediaStream, with
-   * the provided TrackID. You may start appending data to the track
-   * immediately after. */
-  virtual nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) = 0;
-
-  /* Called when the stream wants more data */
-  virtual void NotifyPull(MediaStreamGraph* aGraph,
-                          SourceMediaStream *aSource,
-                          TrackID aId,
-                          StreamTime aDesiredTime,
-                          const PrincipalHandle& aPrincipalHandle) = 0;
-
-  /* Stop the device and release the corresponding MediaStream */
-  virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
-
-  /* Restart with new capability */
-  virtual nsresult Restart(AllocationHandle* aHandle,
-                           const dom::MediaTrackConstraints& aConstraints,
-                           const MediaEnginePrefs &aPrefs,
-                           const nsString& aDeviceId,
-                           const char** aOutBadConstraint) = 0;
-
-  /* Returns true if a source represents a fake capture device and
-   * false otherwise
-   */
-  virtual bool IsFake() = 0;
-
-  /* Returns the type of media source (camera, microphone, screen, window, etc) */
-  virtual dom::MediaSourceEnum GetMediaSource() const = 0;
-
-  /* If implementation of MediaEngineSource supports TakePhoto(), the picture
-   * should be return via aCallback object. Otherwise, it returns NS_ERROR_NOT_IMPLEMENTED.
-   */
-  virtual nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) = 0;
-
-  /* Return false if device is currently allocated or started */
-  bool IsAvailable() {
-    if (mState == kAllocated || mState == kStarted) {
-      return false;
-    } else {
-      return true;
-    }
-  }
-
-  /* It is an error to call Start() before an Allocate(), and Stop() before
-   * a Start(). Only Allocate() may be called after a Deallocate(). */
-
-  /* This call reserves but does not start the device. */
-  virtual nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                            const MediaEnginePrefs &aPrefs,
-                            const nsString& aDeviceId,
-                            const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
-                            AllocationHandle** aOutHandle,
-                            const char** aOutBadConstraint)
-  {
-    AssertIsOnOwningThread();
-    MOZ_ASSERT(aOutHandle);
-    RefPtr<AllocationHandle> handle =
-      new AllocationHandle(aConstraints, aPrincipalInfo, aPrefs, aDeviceId);
-    nsresult rv = ReevaluateAllocation(handle, nullptr, aPrefs, aDeviceId,
-                                       aOutBadConstraint);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    mRegisteredHandles.AppendElement(handle);
-    handle.forget(aOutHandle);
-    return NS_OK;
-  }
-
-  virtual uint32_t GetBestFitnessDistance(
-      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-      const nsString& aDeviceId) const = 0;
-
-  void GetSettings(dom::MediaTrackSettings& aOutSettings)
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-    aOutSettings = *mSettings;
+    return DEFAULT_43_VIDEO_HEIGHT;
   }
-
-protected:
-  // Only class' own members can be initialized in constructor initializer list.
-  explicit MediaEngineSource(MediaEngineState aState)
-    : mState(aState)
-    , mInShutdown(false)
-    , mSettings(MakeRefPtr<media::Refcountable<dom::MediaTrackSettings>>())
-  {}
-
-  /* UpdateSingleSource - Centralized abstract function to implement in those
-   * cases where a single device is being shared between users. Should apply net
-   * constraints and restart the device as needed.
-   *
-   * aHandle           - New or existing handle, or null to update after removal.
-   * aNetConstraints   - Net constraints to be applied to the single device.
-   * aPrefs            - As passed in (in case of changes in about:config).
-   * aDeviceId         - As passed in (origin dependent).
-   * aOutBadConstraint - Result: nonzero if failed to apply. Name of culprit.
-   */
-
-  virtual nsresult
-  UpdateSingleSource(const AllocationHandle* aHandle,
-                     const NormalizedConstraints& aNetConstraints,
-                     const NormalizedConstraints& aNewConstraint,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId,
-                     const char** aOutBadConstraint) {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  };
-
-  /* ReevaluateAllocation - Call to change constraints for an allocation of
-   * a single device. Manages allocation handles, calculates net constraints
-   * from all competing allocations, and calls UpdateSingleSource with the net
-   * result, to restart the single device as needed.
-   *
-   * aHandle            - New or existing handle, or null to update after removal.
-   * aConstraintsUpdate - Constraints to be applied to existing handle, or null.
-   * aPrefs             - As passed in (in case of changes from about:config).
-   * aDeviceId          - As passed in (origin-dependent id).
-   * aOutBadConstraint  - Result: nonzero if failed to apply. Name of culprit.
-   */
-
-  nsresult
-  ReevaluateAllocation(AllocationHandle* aHandle,
-                       NormalizedConstraints* aConstraintsUpdate,
-                       const MediaEnginePrefs& aPrefs,
-                       const nsString& aDeviceId,
-                       const char** aOutBadConstraint)
-  {
-    // aHandle and/or aConstraintsUpdate may be nullptr (see below)
-
-    AutoTArray<const NormalizedConstraints*, 10> allConstraints;
-    AutoTArray<const NormalizedConstraints*, 1> updatedConstraint;
-    for (auto& registered : mRegisteredHandles) {
-      if (aConstraintsUpdate && registered.get() == aHandle) {
-        continue; // Don't count old constraints
-      }
-      allConstraints.AppendElement(&registered->mConstraints);
-    }
-    if (aConstraintsUpdate) {
-      allConstraints.AppendElement(aConstraintsUpdate);
-      updatedConstraint.AppendElement(aConstraintsUpdate);
-    } else if (aHandle) {
-      // In the case of AddShareOfSingleSource, the handle isn't registered yet.
-      allConstraints.AppendElement(&aHandle->mConstraints);
-      updatedConstraint.AppendElement(&aHandle->mConstraints);
-    } else {
-      updatedConstraint.AppendElements(allConstraints);
-    }
-
-    NormalizedConstraints netConstraints(allConstraints);
-    if (netConstraints.mBadConstraint) {
-      *aOutBadConstraint = netConstraints.mBadConstraint;
-      return NS_ERROR_FAILURE;
-    }
-
-    NormalizedConstraints newConstraint(updatedConstraint);
-    nsresult rv = UpdateSingleSource(aHandle, netConstraints, newConstraint, aPrefs, aDeviceId,
-                                     aOutBadConstraint);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    if (aHandle && aConstraintsUpdate) {
-      aHandle->mConstraints = *aConstraintsUpdate;
-    }
-    return NS_OK;
-  }
-
-  void AssertIsOnOwningThread()
-  {
-    NS_ASSERT_OWNINGTHREAD(MediaEngineSource);
-  }
-
-  MediaEngineState mState;
-
-  NS_DECL_OWNINGTHREAD
-
-  nsTArray<RefPtr<AllocationHandle>> mRegisteredHandles;
-  bool mInShutdown;
-
-  // The following is accessed on main-thread only. It has its own ref-count to
-  // avoid ref-counting MediaEngineSource itself in runnables.
-  // (MediaEngineSource subclasses balk on ref-counts too late during shutdown.)
-  RefPtr<media::Refcountable<dom::MediaTrackSettings>> mSettings;
-};
-
-class MediaEngineVideoSource : public MediaEngineSource
-{
-public:
-  virtual ~MediaEngineVideoSource() {}
-
-protected:
-  explicit MediaEngineVideoSource(MediaEngineState aState)
-    : MediaEngineSource(aState) {}
-  MediaEngineVideoSource()
-    : MediaEngineSource(kReleased) {}
-};
-
-/**
- * Audio source and friends.
- */
-class MediaEngineAudioSource : public MediaEngineSource,
-                               public AudioDataListenerInterface
-{
-public:
-  virtual ~MediaEngineAudioSource() {}
-
-protected:
-  explicit MediaEngineAudioSource(MediaEngineState aState)
-    : MediaEngineSource(aState) {}
-  MediaEngineAudioSource()
-    : MediaEngineSource(kReleased) {}
-
 };
 
 } // namespace mozilla
 
-#endif /* MEDIAENGINE_H_ */
+#endif // MediaEnginePrefs_h
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -1,665 +1,946 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaEngineRemoteVideoSource.h"
 
+#include "AllocationHandle.h"
+#include "CamerasChild.h"
+#include "MediaManager.h"
+#include "MediaTrackConstraints.h"
 #include "mozilla/RefPtr.h"
-#include "VideoUtils.h"
 #include "nsIPrefService.h"
-#include "MediaTrackConstraints.h"
-#include "CamerasChild.h"
 #include "VideoFrameUtils.h"
+#include "VideoUtils.h"
 #include "webrtc/api/video/i420_buffer.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
-extern mozilla::LogModule* GetMediaManagerLog();
+mozilla::LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
 
 namespace mozilla {
 
-uint64_t MediaEngineCameraVideoSource::AllocationHandle::sId = 0;
-
-// These need a definition somewhere because template
-// code is allowed to take their address, and they aren't
-// guaranteed to have one without this.
-const unsigned int MediaEngineSource::kMaxDeviceNameLength;
-const unsigned int MediaEngineSource::kMaxUniqueIdLength;;
-
 using dom::ConstrainLongRange;
-
-NS_IMPL_ISUPPORTS0(MediaEngineRemoteVideoSource)
+using dom::MediaSourceEnum;
+using dom::MediaTrackConstraints;
+using dom::MediaTrackConstraintSet;
+using dom::MediaTrackSettings;
+using dom::VideoFacingModeEnum;
 
 MediaEngineRemoteVideoSource::MediaEngineRemoteVideoSource(
-  int aIndex, mozilla::camera::CaptureEngine aCapEngine,
-  dom::MediaSourceEnum aMediaSource, bool aScary, const char* aMonitorName)
-  : MediaEngineCameraVideoSource(aIndex, aMonitorName),
-    mMediaSource(aMediaSource),
-    mCapEngine(aCapEngine),
-    mScary(aScary)
+    int aIndex,
+    camera::CaptureEngine aCapEngine,
+    MediaSourceEnum aMediaSource,
+    bool aScary)
+  : mCaptureIndex(aIndex)
+  , mMediaSource(aMediaSource)
+  , mCapEngine(aCapEngine)
+  , mScary(aScary)
+  , mMutex("MediaEngineRemoteVideoSource::mMutex")
+  , mSettings(MakeAndAddRef<media::Refcountable<MediaTrackSettings>>())
 {
-  MOZ_ASSERT(aMediaSource != dom::MediaSourceEnum::Other);
-  mSettings->mWidth.Construct(0);
-  mSettings->mHeight.Construct(0);
-  mSettings->mFrameRate.Construct(0);
+  MOZ_ASSERT(aMediaSource != MediaSourceEnum::Other);
   Init();
 }
 
 void
 MediaEngineRemoteVideoSource::Init()
 {
   LOG((__PRETTY_FUNCTION__));
+  AssertIsOnOwningThread();
+
   char deviceName[kMaxDeviceNameLength];
   char uniqueId[kMaxUniqueIdLength];
-  if (mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::GetCaptureDevice,
-    mCapEngine, mCaptureIndex,
-    deviceName, kMaxDeviceNameLength,
-    uniqueId, kMaxUniqueIdLength, nullptr)) {
+  if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice,
+                              mCapEngine, mCaptureIndex,
+                              deviceName, kMaxDeviceNameLength,
+                              uniqueId, kMaxUniqueIdLength, nullptr)) {
     LOG(("Error initializing RemoteVideoSource (GetCaptureDevice)"));
     return;
   }
 
   SetName(NS_ConvertUTF8toUTF16(deviceName));
   SetUUID(uniqueId);
 
   mInitDone = true;
 }
 
 void
 MediaEngineRemoteVideoSource::Shutdown()
 {
   LOG((__PRETTY_FUNCTION__));
+  AssertIsOnOwningThread();
+
   if (!mInitDone) {
+    // Already shut down
     return;
   }
+
+  // Allocate always returns a null AllocationHandle.
+  // We can safely pass nullptr to Deallocate() below.
   if (mState == kStarted) {
-    SourceMediaStream *source;
-    bool empty;
+    Stop(mStream, mTrackID);
+  }
+  if (mState == kAllocated || mState == kStopped) {
+    Deallocate(nullptr);
+  }
+  MOZ_ASSERT(mState == kReleased);
+
+  mInitDone = false;
+}
+
+void
+MediaEngineRemoteVideoSource::SetName(nsString aName)
+{
+  LOG((__PRETTY_FUNCTION__));
+  AssertIsOnOwningThread();
+
+  mDeviceName = Move(aName);
+  bool hasFacingMode = false;
+  VideoFacingModeEnum facingMode = VideoFacingModeEnum::User;
+
+  // Set facing mode based on device name.
+#if defined(ANDROID)
+  // Names are generated. Example: "Camera 0, Facing back, Orientation 90"
+  //
+  // See media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/
+  // webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
 
-    while (1) {
-      {
-        MonitorAutoLock lock(mMonitor);
-        empty = mSources.IsEmpty();
-        if (empty) {
-          MOZ_ASSERT(mPrincipalHandles.IsEmpty());
-          MOZ_ASSERT(mTargetCapabilities.IsEmpty());
-          MOZ_ASSERT(mHandleIds.IsEmpty());
-          MOZ_ASSERT(mImages.IsEmpty());
-          break;
-        }
-        source = mSources[0];
-      }
-      Stop(source, kVideoTrack); // XXX change to support multiple tracks
-    }
-    MOZ_ASSERT(mState == kStopped);
+  if (aName.Find(NS_LITERAL_STRING("Facing back")) != kNotFound) {
+    hasFacingMode = true;
+    facingMode = VideoFacingModeEnum::Environment;
+  } else if (aName.Find(NS_LITERAL_STRING("Facing front")) != kNotFound) {
+    hasFacingMode = true;
+    facingMode = VideoFacingModeEnum::User;
+  }
+#endif // ANDROID
+#ifdef XP_MACOSX
+  // Kludge to test user-facing cameras on OSX.
+  if (aName.Find(NS_LITERAL_STRING("Face")) != -1) {
+    hasFacingMode = true;
+    facingMode = VideoFacingModeEnum::User;
+  }
+#endif
+#ifdef XP_WIN
+  // The camera names on the Surface Book are "Microsoft Camera Front" and
+  // "Microsoft Camera Rear", respectively.
+
+  if (aName.Find(NS_LITERAL_STRING("Front")) != kNotFound) {
+    hasFacingMode = true;
+    facingMode = VideoFacingModeEnum::User;
+  } else if (aName.Find(NS_LITERAL_STRING("Rear")) != kNotFound) {
+    hasFacingMode = true;
+    facingMode = VideoFacingModeEnum::Environment;
   }
-
-  for (auto& registered : mRegisteredHandles) {
-    MOZ_ASSERT(mState == kAllocated || mState == kStopped);
-    Deallocate(registered.get());
+#endif // XP_WIN
+  if (hasFacingMode) {
+    mFacingMode.Assign(NS_ConvertUTF8toUTF16(
+        dom::VideoFacingModeEnumValues::strings[uint32_t(facingMode)].value));
+  } else {
+    mFacingMode.Truncate();
   }
+}
 
-  MOZ_ASSERT(mState == kReleased);
-  Super::Shutdown();
-  mInitDone = false;
+nsString
+MediaEngineRemoteVideoSource::GetName() const
+{
+  AssertIsOnOwningThread();
+
+  return mDeviceName;
+}
+
+void
+MediaEngineRemoteVideoSource::SetUUID(const char* aUUID)
+{
+  AssertIsOnOwningThread();
+
+  mUniqueId.Assign(aUUID);
+}
+
+nsCString
+MediaEngineRemoteVideoSource::GetUUID() const
+{
+  AssertIsOnOwningThread();
+
+  return mUniqueId;
 }
 
 nsresult
 MediaEngineRemoteVideoSource::Allocate(
-    const dom::MediaTrackConstraints& aConstraints,
+    const MediaTrackConstraints& aConstraints,
     const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
     AllocationHandle** aOutHandle,
     const char** aOutBadConstraint)
 {
   LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
 
   if (!mInitDone) {
     LOG(("Init not done"));
     return NS_ERROR_FAILURE;
   }
 
-  nsresult rv = Super::Allocate(aConstraints, aPrefs, aDeviceId, aPrincipalInfo,
-                                aOutHandle, aOutBadConstraint);
-  if (NS_FAILED(rv)) {
-    return rv;
+  MOZ_ASSERT(mState == kReleased);
+
+  NormalizedConstraints constraints(aConstraints);
+  LOG(("ChooseCapability(kFitness) for mTargetCapability and mCapability (Allocate) ++"));
+  if (!ChooseCapability(constraints, aPrefs, aDeviceId, mCapability, kFitness)) {
+    *aOutBadConstraint =
+      MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
+    return NS_ERROR_FAILURE;
+  }
+  LOG(("ChooseCapability(kFitness) for mTargetCapability and mCapability (Allocate) --"));
+
+  if (camera::GetChildAndCall(&camera::CamerasChild::AllocateCaptureDevice,
+                              mCapEngine, mUniqueId.get(),
+                              kMaxUniqueIdLength, mCaptureIndex,
+                              aPrincipalInfo)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  *aOutHandle = nullptr;
+
+  {
+    MutexAutoLock lock(mMutex);
+    mState = kAllocated;
   }
-  if (mState == kStarted &&
-      MOZ_LOG_TEST(GetMediaManagerLog(), mozilla::LogLevel::Debug)) {
-    MonitorAutoLock lock(mMonitor);
-    if (mSources.IsEmpty()) {
-      MOZ_ASSERT(mPrincipalHandles.IsEmpty());
-      MOZ_ASSERT(mTargetCapabilities.IsEmpty());
-      MOZ_ASSERT(mHandleIds.IsEmpty());
-      MOZ_ASSERT(mImages.IsEmpty());
-      LOG(("Video device %d reallocated", mCaptureIndex));
-    } else {
-      LOG(("Video device %d allocated shared", mCaptureIndex));
-    }
+
+  LOG(("Video device %d allocated", mCaptureIndex));
+  return NS_OK;
+}
+
+nsresult
+MediaEngineRemoteVideoSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
+{
+  LOG((__PRETTY_FUNCTION__));
+  AssertIsOnOwningThread();
+
+  MOZ_ASSERT(mState == kStopped || mState == kAllocated);
+  MOZ_ASSERT(mStream);
+  MOZ_ASSERT(IsTrackIDExplicit(mTrackID));
+
+  {
+    MutexAutoLock lock(mMutex);
+
+    mStream = nullptr;
+    mTrackID = TRACK_NONE;
+    mPrincipal = PRINCIPAL_HANDLE_NONE;
+    mState = kReleased;
+  }
+
+  // Stop() has stopped capture synchronously on the media thread before we get
+  // here, so there are no longer any callbacks on an IPC thread accessing
+  // mImageContainer.
+  mImageContainer = nullptr;
+
+  LOG(("Video device %d deallocated", mCaptureIndex));
+
+  if (camera::GetChildAndCall(&camera::CamerasChild::ReleaseCaptureDevice,
+                              mCapEngine, mCaptureIndex)) {
+    MOZ_ASSERT_UNREACHABLE("Couldn't release allocated device");
   }
   return NS_OK;
 }
 
 nsresult
-MediaEngineRemoteVideoSource::Deallocate(AllocationHandle* aHandle)
+MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream,
+                                    TrackID aTrackID,
+                                    const PrincipalHandle& aPrincipal)
 {
   LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
 
-  Super::Deallocate(aHandle);
+  MOZ_ASSERT(aStream);
+  MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
+
+  if (!mImageContainer) {
+    mImageContainer = layers::LayerManager::CreateImageContainer(
+                          layers::ImageContainer::ASYNCHRONOUS);
+  }
+
+  {
+    MutexAutoLock lock(mMutex);
+    mStream = aStream;
+    mTrackID = aTrackID;
+    mPrincipal = aPrincipal;
+    mState = kStarted;
+  }
 
-  if (!mRegisteredHandles.Length()) {
-    if (mState != kStopped && mState != kAllocated) {
-      return NS_ERROR_FAILURE;
-    }
-    mozilla::camera::GetChildAndCall(
-      &mozilla::camera::CamerasChild::ReleaseCaptureDevice,
-      mCapEngine, mCaptureIndex);
-    mState = kReleased;
-    LOG(("Video device %d deallocated", mCaptureIndex));
-  } else {
-    LOG(("Video device %d deallocated but still in use", mCaptureIndex));
+  if (camera::GetChildAndCall(&camera::CamerasChild::StartCapture,
+                              mCapEngine, mCaptureIndex, mCapability, this)) {
+    LOG(("StartCapture failed"));
+    MutexAutoLock lock(mMutex);
+    mStream = nullptr;
+    mTrackID = TRACK_NONE;
+    mPrincipal = PRINCIPAL_HANDLE_NONE;
+    mState = kStopped;
+    return NS_ERROR_FAILURE;
   }
+
+  NS_DispatchToMainThread(media::NewRunnableFrom([settings = mSettings]() mutable {
+    settings->mWidth.Construct(0);
+    settings->mHeight.Construct(0);
+    settings->mFrameRate.Construct(0);
+    return NS_OK;
+  }));
+
+  mStream->AddTrack(mTrackID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
+
   return NS_OK;
 }
 
 nsresult
-MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
-                                    const PrincipalHandle& aPrincipalHandle)
+MediaEngineRemoteVideoSource::Stop(SourceMediaStream* aStream,
+                                   TrackID aTrackID)
 {
   LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
-  if (!mInitDone || !aStream) {
-    LOG(("No stream or init not done"));
-    return NS_ERROR_FAILURE;
-  }
 
-  if (!mImageContainer) {
-    mImageContainer =
-      layers::LayerManager::CreateImageContainer(layers::ImageContainer::ASYNCHRONOUS);
-  }
+  MOZ_ASSERT(mState == kStarted);
+
+  aStream->EndTrack(aTrackID);
 
   {
-    MonitorAutoLock lock(mMonitor);
-    mSources.AppendElement(aStream);
-    mPrincipalHandles.AppendElement(aPrincipalHandle);
-    mTargetCapabilities.AppendElement(mTargetCapability);
-    mHandleIds.AppendElement(mHandleId);
-    mImages.AppendElement(nullptr);
+    MutexAutoLock lock(mMutex);
+    mState = kStopped;
 
-    MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
-    MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
-    MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
-    MOZ_ASSERT(mSources.Length() == mImages.Length());
+    // Drop any cached image so we don't start with a stale image on next
+    // usage.  Also, gfx gets very upset if these are held until this object
+    // is gc'd in final-cc during shutdown (bug 1374164)
+    mImage = nullptr;
   }
 
-  aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
-
-  if (mState == kStarted) {
-    return NS_OK;
-  }
-
-  mState = kStarted;
-  mTrackID = aID;
-
-  if (mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::StartCapture,
-    mCapEngine, mCaptureIndex, mCapability, this)) {
-    LOG(("StartCapture failed"));
-    return NS_ERROR_FAILURE;
+  if (camera::GetChildAndCall(&camera::CamerasChild::StopCapture,
+                              mCapEngine, mCaptureIndex)) {
+    MOZ_DIAGNOSTIC_ASSERT(false, "Stopping a started capture failed");
   }
 
   return NS_OK;
 }
 
 nsresult
-MediaEngineRemoteVideoSource::Stop(mozilla::SourceMediaStream* aSource,
-                                   mozilla::TrackID aID)
+MediaEngineRemoteVideoSource::Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                                          const MediaTrackConstraints& aConstraints,
+                                          const MediaEnginePrefs& aPrefs,
+                                          const nsString& aDeviceId,
+                                          const char** aOutBadConstraint)
 {
   LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
-  {
-    MonitorAutoLock lock(mMonitor);
+
+  MOZ_ASSERT(mInitDone);
+
+  NormalizedConstraints constraints(aConstraints);
+  webrtc::CaptureCapability newCapability;
+  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) ++"));
+  if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability, kFitness)) {
+    *aOutBadConstraint =
+      MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
+    return NS_ERROR_FAILURE;
+  }
+  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) --"));
 
-    // Drop any cached image so we don't start with a stale image on next
-    // usage.  Also, gfx gets very upset if these are held until this object
-    // is gc'd in final-cc during shutdown (bug 1374164)
-    mImage = nullptr;
-    // we drop mImageContainer only in MediaEngineCaptureVideoSource::Shutdown()
+  if (mCapability == newCapability) {
+    return NS_OK;
+  }
+
+  // Start() applies mCapability on the device.
+  mCapability = newCapability;
 
-    size_t i = mSources.IndexOf(aSource);
-    if (i == mSources.NoIndex) {
-      // Already stopped - this is allowed
-      return NS_OK;
+
+  if (mState == kStarted) {
+    // Allocate always returns a null AllocationHandle.
+    // We can safely pass nullptr below.
+    nsresult rv = Stop(mStream, mTrackID);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
     }
 
-    MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
-    MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
-    MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
-    MOZ_ASSERT(mSources.Length() == mImages.Length());
-    mSources.RemoveElementAt(i);
-    mPrincipalHandles.RemoveElementAt(i);
-    mTargetCapabilities.RemoveElementAt(i);
-    mHandleIds.RemoveElementAt(i);
-    mImages.RemoveElementAt(i);
-
-    aSource->EndTrack(aID);
-
-    if (!mSources.IsEmpty()) {
-      return NS_OK;
+    rv = Start(mStream, mTrackID, mPrincipal);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return rv;
     }
-    if (mState != kStarted) {
-      return NS_ERROR_FAILURE;
-    }
-
-    mState = kStopped;
   }
 
-  mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::StopCapture,
-    mCapEngine, mCaptureIndex);
-
   return NS_OK;
 }
 
-nsresult
-MediaEngineRemoteVideoSource::Restart(AllocationHandle* aHandle,
-                                      const dom::MediaTrackConstraints& aConstraints,
-                                      const MediaEnginePrefs& aPrefs,
-                                      const nsString& aDeviceId,
-                                      const char** aOutBadConstraint)
+size_t
+MediaEngineRemoteVideoSource::NumCapabilities() const
 {
   AssertIsOnOwningThread();
-  if (!mInitDone) {
-    LOG(("Init not done"));
-    return NS_ERROR_FAILURE;
-  }
-  MOZ_ASSERT(aHandle);
-  NormalizedConstraints constraints(aConstraints);
-  return ReevaluateAllocation(aHandle, &constraints, aPrefs, aDeviceId,
-                              aOutBadConstraint);
-}
 
-nsresult
-MediaEngineRemoteVideoSource::UpdateSingleSource(
-    const AllocationHandle* aHandle,
-    const NormalizedConstraints& aNetConstraints,
-    const NormalizedConstraints& aNewConstraint,
-    const MediaEnginePrefs& aPrefs,
-    const nsString& aDeviceId,
-    const char** aOutBadConstraint)
-{
-  switch (mState) {
-    case kReleased:
-      MOZ_ASSERT(aHandle);
-      mHandleId = aHandle->mId;
-      LOG(("ChooseCapability(kFitness) for mTargetCapability and mCapability ++"));
-      if (!ChooseCapability(aNetConstraints, aPrefs, aDeviceId, mCapability, kFitness)) {
-        *aOutBadConstraint = FindBadConstraint(aNetConstraints, *this, aDeviceId);
-        return NS_ERROR_FAILURE;
-      }
-      LOG(("ChooseCapability(kFitness) for mTargetCapability and mCapability --"));
-      mTargetCapability = mCapability;
-
-      if (camera::GetChildAndCall(&camera::CamerasChild::AllocateCaptureDevice,
-                                  mCapEngine, GetUUID().get(),
-                                  kMaxUniqueIdLength, mCaptureIndex,
-                                  aHandle->mPrincipalInfo)) {
-        return NS_ERROR_FAILURE;
-      }
-      mState = kAllocated;
-      SetLastCapability(mCapability);
-      LOG(("Video device %d allocated", mCaptureIndex));
-      break;
+  mHardcodedCapabilities.Clear();
+  int num = camera::GetChildAndCall(&camera::CamerasChild::NumberOfCapabilities,
+                                    mCapEngine, mUniqueId.get());
 
-    case kStarted:
-      {
-        size_t index = mHandleIds.NoIndex;
-        if (aHandle) {
-          mHandleId = aHandle->mId;
-          index = mHandleIds.IndexOf(mHandleId);
-        }
-
-        LOG(("ChooseCapability(kFitness) for mTargetCapability ++"));
-        if (!ChooseCapability(aNewConstraint, aPrefs, aDeviceId, mTargetCapability,
-                              kFitness)) {
-          *aOutBadConstraint = FindBadConstraint(aNewConstraint, *this, aDeviceId);
-          return NS_ERROR_FAILURE;
-        }
-        LOG(("ChooseCapability(kFitness) for mTargetCapability --"));
-
-        if (index != mHandleIds.NoIndex) {
-          MonitorAutoLock lock(mMonitor);
-          mTargetCapabilities[index] = mTargetCapability;
-          MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
-          MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
-          MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
-          MOZ_ASSERT(mSources.Length() == mImages.Length());
-        }
+  if (num >= 1) {
+    return num;
+  }
 
-        LOG(("ChooseCapability(kFeasibility) for mCapability ++"));
-        if (!ChooseCapability(aNetConstraints, aPrefs, aDeviceId, mCapability,
-                              kFeasibility)) {
-          *aOutBadConstraint = FindBadConstraint(aNetConstraints, *this, aDeviceId);
-          return NS_ERROR_FAILURE;
-        }
-        LOG(("ChooseCapability(kFeasibility) for mCapability --"));
-
-        if (mCapability != mLastCapability) {
-          camera::GetChildAndCall(&camera::CamerasChild::StopCapture,
-                                  mCapEngine, mCaptureIndex);
-          if (camera::GetChildAndCall(&camera::CamerasChild::StartCapture,
-                                      mCapEngine, mCaptureIndex, mCapability,
-                                      this)) {
-            LOG(("StartCapture failed"));
-            return NS_ERROR_FAILURE;
-          }
-          SetLastCapability(mCapability);
-        }
-        break;
-      }
-
-    default:
-      LOG(("Video device %d in ignored state %d", mCaptureIndex, mState));
-      break;
-  }
-  return NS_OK;
+  // The default for devices that don't return discrete capabilities: treat
+  // them as supporting all capabilities orthogonally. E.g. screensharing.
+  // CaptureCapability defaults key values to 0, which means accept any value.
+  mHardcodedCapabilities.AppendElement(webrtc::CaptureCapability());
+  return mHardcodedCapabilities.Length(); // 1
 }
 
 void
-MediaEngineRemoteVideoSource::SetLastCapability(
-    const webrtc::CaptureCapability& aCapability)
+MediaEngineRemoteVideoSource::GetCapability(size_t aIndex,
+                                            webrtc::CaptureCapability& aOut) const
 {
-  mLastCapability = mCapability;
-
-  webrtc::CaptureCapability cap = aCapability;
-  switch (mMediaSource) {
-    case dom::MediaSourceEnum::Screen:
-    case dom::MediaSourceEnum::Window:
-    case dom::MediaSourceEnum::Application:
-      // Undo the hack where ideal and max constraints are crammed together
-      // in mCapability for consumption by low-level code. We don't actually
-      // know the real resolution yet, so report min(ideal, max) for now.
-      cap.width = std::min(cap.width >> 16, cap.width & 0xffff);
-      cap.height = std::min(cap.height >> 16, cap.height & 0xffff);
-      break;
-
-    default:
-      break;
+  AssertIsOnOwningThread();
+  if (!mHardcodedCapabilities.IsEmpty()) {
+    MOZ_ASSERT(aIndex < mHardcodedCapabilities.Length());
+    aOut = mHardcodedCapabilities.SafeElementAt(aIndex, webrtc::CaptureCapability()); return;
+  }
-  auto settings = mSettings;
-
-  NS_DispatchToMainThread(media::NewRunnableFrom([settings, cap]() mutable {
-    settings->mWidth.Value() = cap.width;
-    settings->mHeight.Value() = cap.height;
-    settings->mFrameRate.Value() = cap.maxFPS;
-    return NS_OK;
-  }));
+  camera::GetChildAndCall(&camera::CamerasChild::GetCaptureCapability,
+                          mCapEngine, mUniqueId.get(), aIndex, aOut);
 }
 
 void
-MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
-                                         SourceMediaStream* aSource,
-                                         TrackID aID, StreamTime aDesiredTime,
-                                         const PrincipalHandle& aPrincipalHandle)
+MediaEngineRemoteVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                   const RefPtr<SourceMediaStream>& aStream,
+                                   TrackID aTrackID,
+                                   StreamTime aDesiredTime,
+                                   const PrincipalHandle& aPrincipalHandle)
 {
-  StreamTime delta = 0;
-  size_t i;
-  MonitorAutoLock lock(mMonitor);
-  if (mState != kStarted) {
-    return;
-  }
+  MutexAutoLock lock(mMutex);
+  MOZ_ASSERT(mState == kStarted || mState == kStopped);
 
-  i = mSources.IndexOf(aSource);
-  if (i == mSources.NoIndex) {
+  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
+  if (delta <= 0) {
     return;
   }
 
-  delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
-
-  if (delta > 0) {
-    AppendToTrack(aSource, mImages[i], aID, delta, aPrincipalHandle);
+  VideoSegment segment;
+  RefPtr<layers::Image> image = mImage;
+  if (image) {
+    MOZ_ASSERT(mImageSize == image->GetSize());
+    segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle);
+  } else {
+    // nullptr images are allowed, but we force it to black and retain the size.
+    segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle, true);
   }
-}
 
-void
-MediaEngineRemoteVideoSource::FrameSizeChange(unsigned int w, unsigned int h)
-{
-  if ((mWidth < 0) || (mHeight < 0) ||
-      (w !=  (unsigned int) mWidth) || (h != (unsigned int) mHeight)) {
-    LOG(("MediaEngineRemoteVideoSource Video FrameSizeChange: %ux%u was %ux%u", w, h, mWidth, mHeight));
-    mWidth = w;
-    mHeight = h;
-
-    auto settings = mSettings;
-    NS_DispatchToMainThread(media::NewRunnableFrom([settings, w, h]() mutable {
-      settings->mWidth.Value() = w;
-      settings->mHeight.Value() = h;
-      return NS_OK;
-    }));
-  }
+  // This is safe from any thread, and is safe if the track is Finished
+  // or Destroyed.
+  // This can fail if either a) we haven't added the track yet, or b)
+  // we've removed or finished the track.
+  aStream->AppendToTrack(aTrackID, &segment);
 }
 
 int
 MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer,
-                                    const camera::VideoFrameProperties& aProps)
+                                           const camera::VideoFrameProperties& aProps)
 {
-  MonitorAutoLock lock(mMonitor);
-  // Check for proper state.
-  if (mState != kStarted || !mImageContainer) {
-    LOG(("DeliverFrame: video not started"));
+  // Cameras IPC thread - take great care with accessing members!
+
+  int32_t req_max_width;
+  int32_t req_max_height;
+  int32_t req_ideal_width;
+  int32_t req_ideal_height;
+  {
+    MutexAutoLock lock(mMutex);
+    MOZ_ASSERT(mState == kStarted);
+    req_max_width = mCapability.width & 0xffff;
+    req_max_height = mCapability.height & 0xffff;
+    req_ideal_width = (mCapability.width >> 16) & 0xffff;
+    req_ideal_height = (mCapability.height >> 16) & 0xffff;
+  }
+
+  int32_t dest_max_width = std::min(req_max_width, aProps.width());
+  int32_t dest_max_height = std::min(req_max_height, aProps.height());
+  // This logic works for both the camera and screen sharing cases.
+  // For the camera case, req_ideal_width and req_ideal_height are 0.
+  // The snippet below then sets dst_width to dest_max_width and dst_height to dest_max_height.
+  int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : aProps.width(), dest_max_width);
+  int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : aProps.height(), dest_max_height);
+
+  int dst_stride_y = dst_width;
+  int dst_stride_uv = (dst_width + 1) / 2;
+
+  camera::VideoFrameProperties properties;
+  uint8_t* frame;
+  bool needReScale = (dst_width != aProps.width() ||
+                      dst_height != aProps.height()) &&
+                     dst_width <= aProps.width() &&
+                     dst_height <= aProps.height();
+
+  if (!needReScale) {
+    dst_width = aProps.width();
+    dst_height = aProps.height();
+    frame = aBuffer;
+  } else {
+    rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
+    i420Buffer = webrtc::I420Buffer::Create(aProps.width(),
+                                            aProps.height(),
+                                            aProps.width(),
+                                            (aProps.width() + 1) / 2,
+                                            (aProps.width() + 1) / 2);
+
+    const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
+                                                       aBuffer,
+                                                       0, 0,  // No cropping
+                                                       aProps.width(), aProps.height(),
+                                                       aProps.width() * aProps.height() * 3 / 2,
+                                                       webrtc::kVideoRotation_0,
+                                                       i420Buffer.get());
+
+    webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
+    if (conversionResult < 0) {
+      return 0;
+    }
+
+    rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
+    scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
+                                              dst_stride_uv, dst_stride_uv);
+
+    scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
+    webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
+
+    VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
+    frame = new unsigned char[properties.bufferSize()];
+
+    if (!frame) {
+      return 0;
+    }
+
+    VideoFrameUtils::CopyVideoFrameBuffers(frame,
+                                           properties.bufferSize(), scaledFrame);
+  }
+
+  // Create a video frame and append it to the track.
+  RefPtr<layers::PlanarYCbCrImage> image =
+    mImageContainer->CreatePlanarYCbCrImage();
+
+  const uint8_t lumaBpp = 8;
+  const uint8_t chromaBpp = 4;
+
+  layers::PlanarYCbCrData data;
+
+  // Take lots of care to round up!
+  data.mYChannel = frame;
+  data.mYSize = IntSize(dst_width, dst_height);
+  data.mYStride = (dst_width * lumaBpp + 7) / 8;
+  data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
+  data.mCbChannel = frame + dst_height * data.mYStride;
+  data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
+  data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
+  data.mPicX = 0;
+  data.mPicY = 0;
+  data.mPicSize = IntSize(dst_width, dst_height);
+  data.mStereoMode = StereoMode::MONO;
+
+  if (!image->CopyData(data)) {
+    MOZ_ASSERT(false); if (needReScale) { delete[] frame; } // don't leak the scaled buffer
     return 0;
   }
 
-  // Update the dimensions
-  FrameSizeChange(aProps.width(), aProps.height());
-
-  MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
-  MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
-  MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
-  MOZ_ASSERT(mSources.Length() == mImages.Length());
-
-  for (uint32_t i = 0; i < mTargetCapabilities.Length(); i++ ) {
-    int32_t req_max_width = mTargetCapabilities[i].width & 0xffff;
-    int32_t req_max_height = mTargetCapabilities[i].height & 0xffff;
-    int32_t req_ideal_width = (mTargetCapabilities[i].width >> 16) & 0xffff;
-    int32_t req_ideal_height = (mTargetCapabilities[i].height >> 16) & 0xffff;
-
-    int32_t dest_max_width = std::min(req_max_width, mWidth);
-    int32_t dest_max_height = std::min(req_max_height, mHeight);
-    // This logic works for both camera and screen sharing case.
-    // for camera case, req_ideal_width and req_ideal_height is 0.
-    // The following snippet will set dst_width to dest_max_width and dst_height to dest_max_height
-    int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : mWidth, dest_max_width);
-    int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : mHeight, dest_max_height);
-
-    int dst_stride_y = dst_width;
-    int dst_stride_uv = (dst_width + 1) / 2;
-
-    camera::VideoFrameProperties properties;
-    uint8_t* frame;
-    bool needReScale = !((dst_width == mWidth && dst_height == mHeight) ||
-                         (dst_width > mWidth || dst_height > mHeight));
-
-    if (!needReScale) {
-      dst_width = mWidth;
-      dst_height = mHeight;
-      frame = aBuffer;
-    } else {
-      rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
-      i420Buffer = webrtc::I420Buffer::Create(mWidth, mHeight, mWidth,
-                                              (mWidth + 1) / 2, (mWidth + 1) / 2);
-
-      const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
-                                                         aBuffer,
-                                                         0, 0,  // No cropping
-                                                         mWidth, mHeight,
-                                                         mWidth * mHeight * 3 / 2,
-                                                         webrtc::kVideoRotation_0,
-                                                         i420Buffer.get());
-
-      webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
-      if (conversionResult < 0) {
-        return 0;
-      }
-
-      rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
-      scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
-                                                dst_stride_uv, dst_stride_uv);
-
-      scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
-      webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
-
-      VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
-      frame = new unsigned char[properties.bufferSize()];
-
-      if (!frame) {
-        return 0;
-      }
-
-      VideoFrameUtils::CopyVideoFrameBuffers(frame,
-                                             properties.bufferSize(), scaledFrame);
-    }
-
-    // Create a video frame and append it to the track.
-    RefPtr<layers::PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
-
-    const uint8_t lumaBpp = 8;
-    const uint8_t chromaBpp = 4;
-
-    layers::PlanarYCbCrData data;
-
-    // Take lots of care to round up!
-    data.mYChannel = frame;
-    data.mYSize = IntSize(dst_width, dst_height);
-    data.mYStride = (dst_width * lumaBpp + 7) / 8;
-    data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
-    data.mCbChannel = frame + dst_height * data.mYStride;
-    data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
-    data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
-    data.mPicX = 0;
-    data.mPicY = 0;
-    data.mPicSize = IntSize(dst_width, dst_height);
-    data.mStereoMode = StereoMode::MONO;
-
-    if (!image->CopyData(data)) {
-      MOZ_ASSERT(false);
-      return 0;
-    }
-
-    if (needReScale && frame) {
-      delete frame;
-      frame = nullptr;
-    }
+  if (needReScale && frame) {
+    delete[] frame; // array form: frame came from new unsigned char[]
+    frame = nullptr;
+  }
 
 #ifdef DEBUG
-    static uint32_t frame_num = 0;
-    LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
-              frame_num++, mWidth, mHeight,
-              aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
+  static uint32_t frame_num = 0;
+  LOGFRAME(("frame %d (%dx%d)->(%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
+            frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
+            aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
 #endif
 
+  bool sizeChanged = false;
+  {
+    MutexAutoLock lock(mMutex);
     // implicitly releases last image
-    mImages[i] = image.forget();
+    sizeChanged = mImage && image && mImage->GetSize() != image->GetSize();
+    mImage = image.forget();
+    mImageSize = mImage->GetSize();
   }
 
-  // We'll push the frame into the MSG on the next NotifyPull. This will avoid
+  if (sizeChanged) {
+    NS_DispatchToMainThread(NS_NewRunnableFunction(
+        "MediaEngineRemoteVideoSource::FrameSizeChange",
+        [settings = mSettings, dst_width, dst_height]() mutable {
+      settings->mWidth.Value() = dst_width;
+      settings->mHeight.Value() = dst_height;
+    }));
+  }
+
+  // We'll push the frame into the MSG on the next Pull. This will avoid
   // swamping the MSG with frames should it be taking longer than normal to run
   // an iteration.
 
   return 0;
 }
 
-size_t
-MediaEngineRemoteVideoSource::NumCapabilities() const
+uint32_t
+MediaEngineRemoteVideoSource::GetDistance(
+    const webrtc::CaptureCapability& aCandidate,
+    const NormalizedConstraintSet &aConstraints,
+    const nsString& aDeviceId,
+    const DistanceCalculation aCalculate) const
+{
+  if (aCalculate == kFeasibility) {
+    return GetFeasibilityDistance(aCandidate, aConstraints, aDeviceId);
+  }
+  return GetFitnessDistance(aCandidate, aConstraints, aDeviceId);
+}
+
+uint32_t
+MediaEngineRemoteVideoSource::GetFitnessDistance(
+    const webrtc::CaptureCapability& aCandidate,
+    const NormalizedConstraintSet& aConstraints,
+    const nsString& aDeviceId) const
+{
+  AssertIsOnOwningThread();
+
+  // Treat width|height|frameRate == 0 on capability as "can do any".
+  // This allows for orthogonal capabilities that are not in discrete steps.
+
+  typedef MediaConstraintsHelper H;
+  uint64_t distance =
+    uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
+    uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
+    uint64_t(aCandidate.width ? H::FitnessDistance(int32_t(aCandidate.width),
+                                                  aConstraints.mWidth) : 0) +
+    uint64_t(aCandidate.height ? H::FitnessDistance(int32_t(aCandidate.height),
+                                                    aConstraints.mHeight) : 0) +
+    uint64_t(aCandidate.maxFPS ? H::FitnessDistance(double(aCandidate.maxFPS),
+                                                    aConstraints.mFrameRate) : 0);
+  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
+}
+
+uint32_t
+MediaEngineRemoteVideoSource::GetFeasibilityDistance(
+    const webrtc::CaptureCapability& aCandidate,
+    const NormalizedConstraintSet& aConstraints,
+    const nsString& aDeviceId) const
+{
+  AssertIsOnOwningThread();
+
+  // Treat width|height|frameRate == 0 on capability as "can do any".
+  // This allows for orthogonal capabilities that are not in discrete steps.
+
+  typedef MediaConstraintsHelper H;
+  uint64_t distance =
+    uint64_t(H::FitnessDistance(aDeviceId, aConstraints.mDeviceId)) +
+    uint64_t(H::FitnessDistance(mFacingMode, aConstraints.mFacingMode)) +
+    uint64_t(aCandidate.width ? H::FeasibilityDistance(int32_t(aCandidate.width),
+                                                       aConstraints.mWidth) : 0) +
+    uint64_t(aCandidate.height ? H::FeasibilityDistance(int32_t(aCandidate.height),
+                                                        aConstraints.mHeight) : 0) +
+    uint64_t(aCandidate.maxFPS ? H::FeasibilityDistance(double(aCandidate.maxFPS),
+                                                        aConstraints.mFrameRate) : 0);
+  return uint32_t(std::min(distance, uint64_t(UINT32_MAX)));
+}
+
+// Find best capability by removing inferiors. May leave >1 of equal distance
+
+/* static */ void
+MediaEngineRemoteVideoSource::TrimLessFitCandidates(nsTArray<CapabilityCandidate>& set)
+{
+  uint32_t best = UINT32_MAX;
+  for (auto& candidate : set) {
+    if (best > candidate.mDistance) {
+      best = candidate.mDistance;
+    }
+  }
+  for (size_t i = 0; i < set.Length();) {
+    if (set[i].mDistance > best) {
+      set.RemoveElementAt(i);
+    } else {
+      ++i;
+    }
+  }
+  MOZ_ASSERT(set.Length());
+}
+
+uint32_t
+MediaEngineRemoteVideoSource::GetBestFitnessDistance(
+    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+    const nsString& aDeviceId) const
 {
-  mHardcodedCapabilities.Clear();
-  int num = mozilla::camera::GetChildAndCall(
-      &mozilla::camera::CamerasChild::NumberOfCapabilities,
-      mCapEngine,
-      GetUUID().get());
-  if (num < 1) {
-    // The default for devices that don't return discrete capabilities: treat
-    // them as supporting all capabilities orthogonally. E.g. screensharing.
-    // CaptureCapability defaults key values to 0, which means accept any value.
-    mHardcodedCapabilities.AppendElement(webrtc::CaptureCapability());
-    num = mHardcodedCapabilities.Length(); // 1
+  AssertIsOnOwningThread();
+
+  size_t num = NumCapabilities();
+
+  nsTArray<CapabilityCandidate> candidateSet;
+  for (size_t i = 0; i < num; i++) {
+    candidateSet.AppendElement(i);
+  }
+
+  bool first = true;
+  for (const NormalizedConstraintSet* ns : aConstraintSets) {
+    for (size_t i = 0; i < candidateSet.Length();  ) {
+      auto& candidate = candidateSet[i];
+      webrtc::CaptureCapability cap;
+      GetCapability(candidate.mIndex, cap);
+      uint32_t distance = GetFitnessDistance(cap, *ns, aDeviceId);
+      if (distance == UINT32_MAX) {
+        candidateSet.RemoveElementAt(i);
+      } else {
+        ++i;
+        if (first) {
+          candidate.mDistance = distance;
+        }
+      }
+    }
+    first = false;
+  }
+  if (!candidateSet.Length()) {
+    return UINT32_MAX;
+  }
+  TrimLessFitCandidates(candidateSet);
+  return candidateSet[0].mDistance;
+}
+
+static void
+LogConstraints(const NormalizedConstraintSet& aConstraints)
+{
+  auto& c = aConstraints;
+  if (c.mWidth.mIdeal.isSome()) {
+    LOG(("Constraints: width: { min: %d, max: %d, ideal: %d }",
+         c.mWidth.mMin, c.mWidth.mMax,
+         c.mWidth.mIdeal.valueOr(0)));
+  } else {
+    LOG(("Constraints: width: { min: %d, max: %d }",
+         c.mWidth.mMin, c.mWidth.mMax));
   }
-  return num;
+  if (c.mHeight.mIdeal.isSome()) {
+    LOG(("             height: { min: %d, max: %d, ideal: %d }",
+         c.mHeight.mMin, c.mHeight.mMax,
+         c.mHeight.mIdeal.valueOr(0)));
+  } else {
+    LOG(("             height: { min: %d, max: %d }",
+         c.mHeight.mMin, c.mHeight.mMax));
+  }
+  if (c.mFrameRate.mIdeal.isSome()) {
+    LOG(("             frameRate: { min: %f, max: %f, ideal: %f }",
+         c.mFrameRate.mMin, c.mFrameRate.mMax,
+         c.mFrameRate.mIdeal.valueOr(0)));
+  } else {
+    LOG(("             frameRate: { min: %f, max: %f }",
+         c.mFrameRate.mMin, c.mFrameRate.mMax));
+  }
+}
+
+static void
+LogCapability(const char* aHeader,
+              const webrtc::CaptureCapability &aCapability,
+              uint32_t aDistance)
+{
+  // RawVideoType and VideoCodecType media/webrtc/trunk/webrtc/common_types.h
+  static const char* const types[] = {
+    "I420",
+    "YV12",
+    "YUY2",
+    "UYVY",
+    "IYUV",
+    "ARGB",
+    "RGB24",
+    "RGB565",
+    "ARGB4444",
+    "ARGB1555",
+    "MJPEG",
+    "NV12",
+    "NV21",
+    "BGRA",
+    "Unknown type"
+  };
+
+  static const char* const codec[] = {
+    "VP8",
+    "VP9",
+    "H264",
+    "I420",
+    "RED",
+    "ULPFEC",
+    "Generic codec",
+    "Unknown codec"
+  };
+
+  LOG(("%s: %4u x %4u x %2u maxFps, %s, %s. Distance = %" PRIu32,
+       aHeader, aCapability.width, aCapability.height, aCapability.maxFPS,
+       types[std::min(std::max(uint32_t(0), uint32_t(aCapability.rawType)),
+                      uint32_t(sizeof(types) / sizeof(*types) - 1))],
+       codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.codecType)),
+                      uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
+       aDistance));
 }
 
 bool
 MediaEngineRemoteVideoSource::ChooseCapability(
-    const NormalizedConstraints &aConstraints,
-    const MediaEnginePrefs &aPrefs,
+    const NormalizedConstraints& aConstraints,
+    const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     webrtc::CaptureCapability& aCapability,
     const DistanceCalculation aCalculate)
 {
+  LOG((__PRETTY_FUNCTION__));
   AssertIsOnOwningThread();
 
-  switch(mMediaSource) {
-    case dom::MediaSourceEnum::Screen:
-    case dom::MediaSourceEnum::Window:
-    case dom::MediaSourceEnum::Application: {
+  if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
+    LOG(("ChooseCapability: prefs: %dx%d @%dfps",
+         aPrefs.GetWidth(), aPrefs.GetHeight(),
+         aPrefs.mFPS));
+    LogConstraints(aConstraints);
+    if (!aConstraints.mAdvanced.empty()) {
+      LOG(("Advanced array[%zu]:", aConstraints.mAdvanced.size()));
+      for (auto& advanced : aConstraints.mAdvanced) {
+        LogConstraints(advanced);
+      }
+    }
+  }
+
+  switch (mMediaSource) {
+    case MediaSourceEnum::Screen:
+    case MediaSourceEnum::Window:
+    case MediaSourceEnum::Application: {
       FlattenedConstraints c(aConstraints);
       // The actual resolution to constrain around is not easy to find ahead of
       // time (and may in fact change over time), so as a hack, we push ideal
       // and max constraints down to desktop_capture_impl.cc and finish the
       // algorithm there.
       aCapability.width =
         (c.mWidth.mIdeal.valueOr(0) & 0xffff) << 16 | (c.mWidth.mMax & 0xffff);
       aCapability.height =
         (c.mHeight.mIdeal.valueOr(0) & 0xffff) << 16 | (c.mHeight.mMax & 0xffff);
       aCapability.maxFPS =
         c.mFrameRate.Clamp(c.mFrameRate.mIdeal.valueOr(aPrefs.mFPS));
       return true;
     }
     default:
-      return MediaEngineCameraVideoSource::ChooseCapability(aConstraints, aPrefs, aDeviceId, aCapability, aCalculate);
+      break;
+  }
+
+  size_t num = NumCapabilities();
+
+  nsTArray<CapabilityCandidate> candidateSet;
+  for (size_t i = 0; i < num; i++) {
+    candidateSet.AppendElement(i);
+  }
+
+  // First, filter capabilities by required constraints (min, max, exact).
+
+  for (size_t i = 0; i < candidateSet.Length();) {
+    auto& candidate = candidateSet[i];
+    webrtc::CaptureCapability cap;
+    GetCapability(candidate.mIndex, cap);
+    candidate.mDistance = GetDistance(cap, aConstraints, aDeviceId, aCalculate);
+    LogCapability("Capability", cap, candidate.mDistance);
+    if (candidate.mDistance == UINT32_MAX) {
+      candidateSet.RemoveElementAt(i);
+    } else {
+      ++i;
+    }
+  }
+
+  if (!candidateSet.Length()) {
+    LOG(("failed to find capability match from %zu choices",num));
+    return false;
   }
 
+  // Filter further with all advanced constraints (that don't overconstrain).
+
+  for (const auto &cs : aConstraints.mAdvanced) {
+    nsTArray<CapabilityCandidate> rejects;
+    for (size_t i = 0; i < candidateSet.Length();) {
+      auto& candidate = candidateSet[i];
+      webrtc::CaptureCapability cap;
+      GetCapability(candidate.mIndex, cap);
+      if (GetDistance(cap, cs, aDeviceId, aCalculate) == UINT32_MAX) {
+        rejects.AppendElement(candidate);
+        candidateSet.RemoveElementAt(i);
+      } else {
+        ++i;
+      }
+    }
+    if (!candidateSet.Length()) {
+      candidateSet.AppendElements(Move(rejects));
+    }
+  }
+  MOZ_ASSERT(candidateSet.Length(),
+             "advanced constraints filtering step can't reduce candidates to zero");
+
+  // Remaining algorithm is up to the UA.
+
+  TrimLessFitCandidates(candidateSet);
+
+  // Any remaining multiples all have the same distance. A common case of this
+  // occurs when no ideal is specified. Lean toward defaults.
+  uint32_t sameDistance = candidateSet[0].mDistance;
+  {
+    MediaTrackConstraintSet prefs;
+    prefs.mWidth.SetAsLong() = aPrefs.GetWidth();
+    prefs.mHeight.SetAsLong() = aPrefs.GetHeight();
+    prefs.mFrameRate.SetAsDouble() = aPrefs.mFPS;
+    NormalizedConstraintSet normPrefs(prefs, false);
+
+    for (auto& candidate : candidateSet) {
+      webrtc::CaptureCapability cap;
+      GetCapability(candidate.mIndex, cap);
+      candidate.mDistance = GetDistance(cap, normPrefs, aDeviceId, aCalculate);
+    }
+    TrimLessFitCandidates(candidateSet);
+  }
+
+  // Any remaining multiples all have the same distance, but may vary on
+  // format. Some formats are more desirable for certain use like WebRTC.
+  // E.g. I420 over RGB24 can remove a needless format conversion.
+
+  bool found = false;
+  for (auto& candidate : candidateSet) {
+    webrtc::CaptureCapability cap;
+    GetCapability(candidate.mIndex, cap);
+    if (cap.rawType == webrtc::RawVideoType::kVideoI420 ||
+        cap.rawType == webrtc::RawVideoType::kVideoYUY2 ||
+        cap.rawType == webrtc::RawVideoType::kVideoYV12) {
+      aCapability = cap;
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    GetCapability(candidateSet[0].mIndex, aCapability);
+  }
+
+  LogCapability("Chosen capability", aCapability, sameDistance);
+  return true;
 }
 
 void
-MediaEngineRemoteVideoSource::GetCapability(size_t aIndex,
-                                            webrtc::CaptureCapability& aOut) const
+MediaEngineRemoteVideoSource::GetSettings(MediaTrackSettings& aOutSettings) const
 {
-  if (!mHardcodedCapabilities.IsEmpty()) {
-    MediaEngineCameraVideoSource::GetCapability(aIndex, aOut);
-  }
-  mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::GetCaptureCapability,
-    mCapEngine,
-    GetUUID().get(),
-    aIndex,
-    aOut);
+  MOZ_ASSERT(NS_IsMainThread());
+  aOutSettings = *mSettings;
 }
 
-void MediaEngineRemoteVideoSource::Refresh(int aIndex) {
+void
+MediaEngineRemoteVideoSource::Refresh(int aIndex)
+{
+  LOG((__PRETTY_FUNCTION__));
+  AssertIsOnOwningThread();
+
   // NOTE: mCaptureIndex might have changed when allocated!
   // Use aIndex to update information, but don't change mCaptureIndex!!
   // Caller looked up this source by uniqueId, so it shouldn't change
   char deviceName[kMaxDeviceNameLength];
   char uniqueId[kMaxUniqueIdLength];
 
-  if (mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::GetCaptureDevice,
-    mCapEngine, aIndex,
-    deviceName, sizeof(deviceName),
-    uniqueId, sizeof(uniqueId), nullptr)) {
+  if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice,
+                              mCapEngine, aIndex, deviceName,
+                              sizeof(deviceName), uniqueId, sizeof(uniqueId),
+                              nullptr)) {
     return;
   }
 
   SetName(NS_ConvertUTF8toUTF16(deviceName));
-#ifdef DEBUG
-  MOZ_ASSERT(GetUUID().Equals(uniqueId));
-#endif
+  MOZ_ASSERT(mUniqueId.Equals(uniqueId));
 }
 
-}
+} // namespace mozilla
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -8,121 +8,240 @@
 #define MEDIAENGINE_REMOTE_VIDEO_SOURCE_H_
 
 #include "prcvar.h"
 #include "prthread.h"
 #include "nsIThread.h"
 #include "nsIRunnable.h"
 
 #include "mozilla/Mutex.h"
-#include "mozilla/Monitor.h"
 #include "nsCOMPtr.h"
 #include "nsThreadUtils.h"
 #include "DOMMediaStream.h"
 #include "nsDirectoryServiceDefs.h"
 #include "nsComponentManagerUtils.h"
 
 // Avoid warnings about redefinition of WARN_UNUSED_RESULT
 #include "ipc/IPCMessageUtils.h"
 #include "VideoUtils.h"
-#include "MediaEngineCameraVideoSource.h"
+#include "MediaEngineSource.h"
 #include "VideoSegment.h"
 #include "AudioSegment.h"
 #include "StreamTracks.h"
 #include "MediaStreamGraph.h"
 
 #include "MediaEngineWrapper.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 
 // Camera Access via IPC
 #include "CamerasChild.h"
 
 #include "NullTransport.h"
 
+// WebRTC includes
+#include "webrtc/modules/video_capture/video_capture_defines.h"
+
+namespace webrtc {
+using CaptureCapability = VideoCaptureCapability;
+}
+
 namespace mozilla {
 
+// Fitness distance is defined in
+// https://w3c.github.io/mediacapture-main/getusermedia.html#dfn-selectsettings
+
+// The main difference of feasibility and fitness distance is that if the
+// constraint is required ('max', or 'exact'), and the settings dictionary's value
+// for the constraint does not satisfy the constraint, the fitness distance is
+// positive infinity. Given a continuous space of settings dictionaries comprising
+// all discrete combinations of dimension and frame-rate related properties,
+// the feasibility distance is still in keeping with the constraints algorithm.
+enum DistanceCalculation {
+  kFitness,
+  kFeasibility
+};
+
 /**
  * The WebRTC implementation of the MediaEngine interface.
  */
-class MediaEngineRemoteVideoSource : public MediaEngineCameraVideoSource,
+class MediaEngineRemoteVideoSource : public MediaEngineSource,
                                      public camera::FrameRelay
 {
-  typedef MediaEngineCameraVideoSource Super;
+  ~MediaEngineRemoteVideoSource() = default;
+
+  struct CapabilityCandidate {
+    explicit CapabilityCandidate(size_t index, uint32_t distance = 0)
+    : mIndex(index), mDistance(distance) {}
+
+    size_t mIndex;
+    uint32_t mDistance;
+  };
+  typedef nsTArray<CapabilityCandidate> CapabilitySet;
+
+  bool ChooseCapability(const NormalizedConstraints& aConstraints,
+                        const MediaEnginePrefs& aPrefs,
+                        const nsString& aDeviceId,
+                        webrtc::CaptureCapability& aCapability,
+                        const DistanceCalculation aCalculate);
+
+  uint32_t GetDistance(const webrtc::CaptureCapability& aCandidate,
+                       const NormalizedConstraintSet& aConstraints,
+                       const nsString& aDeviceId,
+                       const DistanceCalculation aCalculate) const;
+
+  uint32_t GetFitnessDistance(const webrtc::CaptureCapability& aCandidate,
+                              const NormalizedConstraintSet& aConstraints,
+                              const nsString& aDeviceId) const;
+
+  uint32_t GetFeasibilityDistance(const webrtc::CaptureCapability& aCandidate,
+                                  const NormalizedConstraintSet& aConstraints,
+                                  const nsString& aDeviceId) const;
+
+  static void TrimLessFitCandidates(CapabilitySet& set);
+
+  uint32_t GetBestFitnessDistance(
+      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+      const nsString& aDeviceId) const override;
+
 public:
-  NS_DECL_THREADSAFE_ISUPPORTS
+  MediaEngineRemoteVideoSource(int aIndex,
+                               camera::CaptureEngine aCapEngine,
+                               dom::MediaSourceEnum aMediaSource,
+                               bool aScary);
 
-  // Old ExternalRenderer
-  void FrameSizeChange(unsigned int w, unsigned int h) override;
   // ExternalRenderer
   int DeliverFrame(uint8_t* buffer,
                    const camera::VideoFrameProperties& properties) override;
 
-  // MediaEngineCameraVideoSource
-  MediaEngineRemoteVideoSource(int aIndex, mozilla::camera::CaptureEngine aCapEngine,
-                               dom::MediaSourceEnum aMediaSource,
-                               bool aScary = false,
-                               const char* aMonitorName = "RemoteVideo.Monitor");
-
-  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
-                    const MediaEnginePrefs& aPrefs,
+  // MediaEngineSource
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return mState == kReleased;
+  }
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
+    return mMediaSource;
+  }
+  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
+                    const MediaEnginePrefs& aPrefs,
                     const nsString& aDeviceId,
-                    const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
-  nsresult Deallocate(AllocationHandle* aHandle) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
   nsresult Stop(SourceMediaStream*, TrackID) override;
-  nsresult Restart(AllocationHandle* aHandle,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const nsString& aDeviceId,
-                   const char** aOutBadConstraint) override;
-  void NotifyPull(MediaStreamGraph* aGraph,
-                  SourceMediaStream* aSource,
-                  TrackID aId,
-                  StreamTime aDesiredTime,
-                  const PrincipalHandle& aPrincipalHandle) override;
-  dom::MediaSourceEnum GetMediaSource() const override {
-    return mMediaSource;
-  }
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
 
-  bool ChooseCapability(
-    const NormalizedConstraints &aConstraints,
-    const MediaEnginePrefs &aPrefs,
-    const nsString& aDeviceId,
-    webrtc::CaptureCapability& aCapability,
-    const DistanceCalculation aCalculate) override;
+
+  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
 
   void Refresh(int aIndex);
 
   void Shutdown() override;
 
-  bool GetScary() const override { return mScary; }
+  nsString GetName() const override;
+  void SetName(nsString aName);
 
-protected:
-  ~MediaEngineRemoteVideoSource() { }
+  nsCString GetUUID() const override;
+  void SetUUID(const char* aUUID);
+
+  bool GetScary() const override { return mScary; }
 
 private:
   // Initialize the needed Video engine interfaces.
   void Init();
-  size_t NumCapabilities() const override;
-  void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) const override;
-  void SetLastCapability(const webrtc::CaptureCapability& aCapability);
+
+  /**
+   * Returns the number of capabilities for the underlying device.
+   *
+   * Guaranteed to return at least one capability.
+   */
+  size_t NumCapabilities() const;
+
+  /**
+   * Fills `aOut` with the capability properties of the device capability with
+   * index `aIndex`.
+   *
+   * It is an error to call this with `aIndex >= NumCapabilities()`.
+   */
+  void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) const;
+
+  int mCaptureIndex;
+  const dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
+  const camera::CaptureEngine mCapEngine;
+  const bool mScary;
+
+  // mMutex protects certain members on 3 threads:
+  // MediaManager, Cameras IPC and MediaStreamGraph.
+  Mutex mMutex;
+
+  // Current state of this source.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  MediaEngineSourceState mState = kReleased;
+
+  // The source stream that we feed video data to.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  RefPtr<SourceMediaStream> mStream;
+
+  // The TrackID in mStream that we feed video data to.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  TrackID mTrackID = TRACK_NONE;
+
+  // The PrincipalHandle that gets attached to the frames we feed to mStream.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
 
-  nsresult
-  UpdateSingleSource(const AllocationHandle* aHandle,
-                     const NormalizedConstraints& aNetConstraints,
-                     const NormalizedConstraints& aNewConstraint,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId,
-                     const char** aOutBadConstraint) override;
+  // Set in Start() and Deallocate() on the owning thread.
+  // Accessed in DeliverFrame() on the camera IPC thread, guaranteed to happen
+  // after Start() and before the end of Stop().
+  RefPtr<layers::ImageContainer> mImageContainer;
+
+  // The latest frame delivered from the video capture backend.
+  // Protected by mMutex.
+  RefPtr<layers::Image> mImage;
+
+  // The intrinsic size of the latest captured image, so we can feed black
+  // images of the same size while stopped.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  gfx::IntSize mImageSize = gfx::IntSize(0, 0);
+
+  // The current settings of this source.
+  // Note that these may be different from the settings of the underlying device
+  // since we scale frames to avoid fingerprinting.
+  // Members are main thread only.
+  const RefPtr<media::Refcountable<dom::MediaTrackSettings>> mSettings;
 
-  dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
-  mozilla::camera::CaptureEngine mCapEngine;
+  // The capability currently chosen by constraints of the user of this source.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  webrtc::CaptureCapability mCapability;
 
-  // To only restart camera when needed, we keep track previous settings.
-  webrtc::CaptureCapability mLastCapability;
-  bool mScary;
+  /**
+   * Capabilities that we choose between when applying constraints.
+   *
+   * This is mutable so that the const method NumCapabilities() can reset it.
+   * Owning thread only.
+   */
+  mutable nsTArray<webrtc::CaptureCapability> mHardcodedCapabilities;
+
+  nsString mDeviceName;
+  nsCString mUniqueId;
+  nsString mFacingMode;
+
+  // Whether init has successfully completed.
+  // Set in Init(), reset in Shutdown().
+  // Owning thread only.
+  bool mInitDone = false;
 };
 
 }
 
 #endif /* MEDIAENGINE_REMOTE_VIDEO_SOURCE_H_ */
new file mode 100644
--- /dev/null
+++ b/dom/media/webrtc/MediaEngineSource.cpp
@@ -0,0 +1,75 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaEngineSource.h"
+
+#include "mozilla/dom/MediaTrackSettingsBinding.h"
+
+namespace mozilla {
+
+// These need a definition somewhere because template
+// code is allowed to take their address, and they aren't
+// guaranteed to have one without this.
+const unsigned int MediaEngineSource::kMaxDeviceNameLength;
+const unsigned int MediaEngineSource::kMaxUniqueIdLength;
+
+/* static */ bool
+MediaEngineSource::IsVideo(dom::MediaSourceEnum aSource)
+{
+  switch (aSource) {
+    case dom::MediaSourceEnum::Camera:
+    case dom::MediaSourceEnum::Screen:
+    case dom::MediaSourceEnum::Application:
+    case dom::MediaSourceEnum::Window:
+    case dom::MediaSourceEnum::Browser:
+      return true;
+    case dom::MediaSourceEnum::Microphone:
+    case dom::MediaSourceEnum::AudioCapture:
+      return false;
+    default:
+      MOZ_ASSERT_UNREACHABLE("Unknown type");
+      return false;
+  }
+}
+
+bool
+MediaEngineSource::RequiresSharing() const
+{
+  return false;
+}
+
+bool
+MediaEngineSource::IsFake() const
+{
+  return false;
+}
+
+bool
+MediaEngineSource::GetScary() const
+{
+  return false;
+}
+
+void
+MediaEngineSource::Shutdown()
+{
+}
+
+nsresult
+MediaEngineSource::TakePhoto(MediaEnginePhotoCallback* aCallback)
+{
+  return NS_ERROR_NOT_IMPLEMENTED;
+}
+
+void
+MediaEngineSource::GetSettings(dom::MediaTrackSettings& aOutSettings) const
+{
+  dom::MediaTrackSettings empty;
+  aOutSettings = empty;
+}
+
+MediaEngineSource::~MediaEngineSource() = default;
+
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/dom/media/webrtc/MediaEngineSource.h
@@ -0,0 +1,275 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MediaEngineSource_h
+#define MediaEngineSource_h
+
+#include "MediaSegment.h"
+#include "MediaTrackConstraints.h"
+#include "mozilla/dom/MediaStreamTrackBinding.h"
+#include "mozilla/media/MediaUtils.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/ThreadSafeWeakPtr.h"
+#include "nsStringFwd.h"
+#include "TrackID.h"
+
+namespace mozilla {
+
+namespace dom {
+class Blob;
+struct MediaTrackSettings;
+} // namespace dom
+
+namespace ipc {
+class PrincipalInfo;
+} // namespace ipc
+
+class AllocationHandle;
+class MediaEnginePhotoCallback;
+class MediaEnginePrefs;
+class SourceMediaStream;
+
+/**
+ * Callback interface for TakePhoto(). Either PhotoComplete() or PhotoError()
+ * should be called.
+ */
+class MediaEnginePhotoCallback {
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEnginePhotoCallback)
+
+  // aBlob is the image captured by MediaEngineSource. It is
+  // called on main thread.
+  virtual nsresult PhotoComplete(already_AddRefed<dom::Blob> aBlob) = 0;
+
+  // It is called on main thread. aRv is the error code.
+  virtual nsresult PhotoError(nsresult aRv) = 0;
+
+protected:
+  virtual ~MediaEnginePhotoCallback() {}
+};
+
+/**
+ * Lifecycle state of MediaEngineSource.
+ */
+enum MediaEngineSourceState {
+  kAllocated, // Allocated, not yet started.
+  kStarted, // Previously allocated or stopped, then started.
+  kStopped, // Previously started, then stopped.
+  kReleased // Not allocated.
+};
+
+/**
+ * The pure interface of a MediaEngineSource.
+ *
+ * Most sources are helped by the defaults implemented in MediaEngineSource.
+ */
+class MediaEngineSourceInterface {
+public:
+  /**
+   * Returns true if this source requires sharing to support multiple
+   * allocations.
+   *
+   * If this returns true, the MediaEngine is expected to do subsequent
+   * allocations on the first instance of this source.
+   *
+   * If this returns false, the MediaEngine is expected to instantiate one
+   * source instance per allocation.
+   *
+   * Sharing means that the source gets multiple simultaneous calls to
+   * Allocate(), Start(), Stop(), Deallocate(), etc. These are all keyed off
+   * the AllocationHandle returned by Allocate() so the source can keep
+   * allocations apart.
+   *
+   * A source typically requires sharing when the underlying hardware doesn't
+   * allow multiple users, or when having multiple users would be inefficient.
+   */
+  virtual bool RequiresSharing() const = 0;
+
+  /**
+   * Return true if this is a fake source. I.e., if it is generating media
+   * itself rather than being an interface to underlying hardware.
+   */
+  virtual bool IsFake() const = 0;
+
+  /**
+   * Returns true if this source is available to allocate.
+   */
+  virtual bool IsAvailable() const = 0;
+
+  /**
+   * Gets the human readable name of this device.
+   */
+  virtual nsString GetName() const = 0;
+
+  /**
+   * Gets the UUID of this device.
+   */
+  virtual nsCString GetUUID() const = 0;
+
+  /**
+   * Get the enum describing the underlying type of MediaSource.
+   */
+  virtual dom::MediaSourceEnum GetMediaSource() const = 0;
+
+  /**
+   * Override w/true if source does end-run around cross origin restrictions.
+   */
+  virtual bool GetScary() const = 0;
+
+  /**
+   * Called by MediaEngine to allocate a handle to this source.
+   *
+   * If this is the first registered AllocationHandle, the underlying device
+   * will be allocated.
+   *
+   * Note that the AllocationHandle may be nullptr at the discretion of the
+   * MediaEngineSource implementation. Any user is to treat it as an opaque
+   * object.
+   */
+  virtual nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
+                            const MediaEnginePrefs& aPrefs,
+                            const nsString& aDeviceId,
+                            const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
+                            AllocationHandle** aOutHandle,
+                            const char** aOutBadConstraint) = 0;
+
+  /**
+   * Start the device and add the track to the provided SourceMediaStream, with
+   * the provided TrackID. You may start appending data to the track
+   * immediately after.
+   */
+  virtual nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) = 0;
+
+  /**
+   * Applies new constraints to the capability selection for the underlying
+   * device.
+   *
+   * Should the constraints lead to choosing a new capability while the device
+   * is actively being captured, the device will restart using the new
+   * capability.
+   */
+  virtual nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                               const dom::MediaTrackConstraints& aConstraints,
+                               const MediaEnginePrefs& aPrefs,
+                               const nsString& aDeviceId,
+                               const char** aOutBadConstraint) = 0;
+
+  /**
+   * Stop the device and release the corresponding MediaStream.
+   */
+  virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
+
+  /**
+   * Called by MediaEngine to deallocate a handle to this source.
+   *
+   * If this was the last registered AllocationHandle, the underlying device
+   * will be deallocated.
+   */
+  virtual nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) = 0;
+
+  /**
+   * Called by MediaEngine when it knows this MediaEngineSource won't be used
+   * anymore. Use it to clean up anything that needs to be cleaned up.
+   */
+  virtual void Shutdown() = 0;
+
+  /**
+   * If implementation of MediaEngineSource supports TakePhoto(), the picture
+   * should be returned via aCallback object. Otherwise, it returns NS_ERROR_NOT_IMPLEMENTED.
+   */
+  virtual nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) = 0;
+
+  /**
+   * GetBestFitnessDistance returns the best distance the capture device can offer
+   * as a whole, given an accumulated number of ConstraintSets.
+   * Ideal values are considered in the first ConstraintSet only.
+   * Plain values are treated as Ideal in the first ConstraintSet.
+   * Plain values are treated as Exact in subsequent ConstraintSets.
+   * Infinity = UINT32_MAX e.g. device cannot satisfy accumulated ConstraintSets.
+   * A finite result may be used to calculate this device's ranking as a choice.
+   */
+  virtual uint32_t GetBestFitnessDistance(
+      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+      const nsString& aDeviceId) const = 0;
+
+  /**
+   * Returns the current settings of the underlying device.
+   *
+   * Note that this might not be the settings of the underlying hardware.
+   * In case of a camera where we intervene and scale frames to avoid
+   * leaking information from other documents than the current one,
+   * GetSettings() will return the scaled resolution. I.e., the
+   * device settings as seen by js.
+   */
+  virtual void GetSettings(dom::MediaTrackSettings& aOutSettings) const = 0;
+
+  /**
+   * Pulls data from the MediaEngineSource into the track.
+   *
+   * Driven by MediaStreamListener::NotifyPull.
+   */
+  virtual void Pull(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream,
+                    TrackID aTrackID,
+                    StreamTime aDesiredTime,
+                    const PrincipalHandle& aPrincipalHandle) = 0;
+};
+
+/**
+ * Abstract base class for MediaEngineSources.
+ *
+ * Implements defaults for some common MediaEngineSourceInterface methods below.
+ * Also implements RefPtr support and an owning-thread model for thread safety
+ * checks in subclasses.
+ */
+class MediaEngineSource : public MediaEngineSourceInterface {
+public:
+
+  // code inside webrtc.org assumes these sizes; don't use anything smaller
+  // without verifying it's ok
+  static const unsigned int kMaxDeviceNameLength = 128;
+  static const unsigned int kMaxUniqueIdLength = 256;
+
+  /**
+   * Returns true if the given source type is for video, false otherwise.
+   * Only call with real types.
+   */
+  static bool IsVideo(dom::MediaSourceEnum aSource);
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEngineSource)
+  NS_DECL_OWNINGTHREAD
+
+  void AssertIsOnOwningThread() const
+  {
+    NS_ASSERT_OWNINGTHREAD(MediaEngineSource);
+  }
+
+  // No sharing required by default.
+  bool RequiresSharing() const override;
+
+  // Not fake by default.
+  bool IsFake() const override;
+
+  // Not scary by default.
+  bool GetScary() const override;
+
+  // Shutdown does nothing by default.
+  void Shutdown() override;
+
+  // TakePhoto returns NS_ERROR_NOT_IMPLEMENTED by default,
+  // to tell the caller to fallback to other methods.
+  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override;
+
+  // Makes aOutSettings empty by default.
+  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
+
+protected:
+  virtual ~MediaEngineSource();
+};
+
+} // namespace mozilla
+
+#endif /* MediaEngineSource_h */
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -12,53 +12,47 @@
 #include "mozilla/dom/BindingDeclarations.h"
 #include "nsGlobalWindow.h"
 #include "nsIDOMClientRect.h"
 #include "nsIDocShell.h"
 #include "nsIPresShell.h"
 #include "nsPresContext.h"
 #include "gfxContext.h"
 #include "gfx2DGlue.h"
+#include "AllocationHandle.h"
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "nsIInterfaceRequestorUtils.h"
 #include "nsIDOMDocument.h"
 #include "nsITabSource.h"
 #include "VideoUtils.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "MediaTrackConstraints.h"
 
 namespace mozilla {
 
 using namespace mozilla::gfx;
 
-NS_IMPL_ISUPPORTS(MediaEngineTabVideoSource, nsIDOMEventListener, nsITimerCallback)
-
 MediaEngineTabVideoSource::MediaEngineTabVideoSource()
-  : mBufWidthMax(0)
-  , mBufHeightMax(0)
-  , mWindowId(0)
-  , mScrollWithPage(false)
-  , mViewportOffsetX(0)
-  , mViewportOffsetY(0)
-  , mViewportWidth(0)
-  , mViewportHeight(0)
-  , mTimePerFrame(0)
-  , mDataSize(0)
-  , mBlackedoutWindow(false)
-  , mMonitor("MediaEngineTabVideoSource") {}
+  : mMutex("MediaEngineTabVideoSource::mMutex") {}
 
 nsresult
 MediaEngineTabVideoSource::StartRunnable::Run()
 {
   mVideoSource->Draw();
-  NS_NewTimerWithCallback(getter_AddRefs(mVideoSource->mTimer),
-                          mVideoSource, mVideoSource->mTimePerFrame,
-                          nsITimer::TYPE_REPEATING_SLACK);
+  mVideoSource->mTimer->InitWithNamedFuncCallback(
+      [](nsITimer* aTimer, void* aClosure) {
+        auto source = static_cast<MediaEngineTabVideoSource*>(aClosure);
+        source->Draw();
+      },
+      mVideoSource,
+      mVideoSource->mTimePerFrame,
+      nsITimer::TYPE_REPEATING_SLACK,
+      "MediaEngineTabVideoSource DrawTimer");
   if (mVideoSource->mTabSource) {
     mVideoSource->mTabSource->NotifyStreamStart(mVideoSource->mWindow);
   }
   return NS_OK;
 }
 
 nsresult
 MediaEngineTabVideoSource::StopRunnable::Run()
@@ -68,28 +62,16 @@ MediaEngineTabVideoSource::StopRunnable:
     mVideoSource->mTimer = nullptr;
   }
   if (mVideoSource->mTabSource) {
     mVideoSource->mTabSource->NotifyStreamStop(mVideoSource->mWindow);
   }
   return NS_OK;
 }
 
-NS_IMETHODIMP
-MediaEngineTabVideoSource::HandleEvent(nsIDOMEvent *event) {
-  Draw();
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-MediaEngineTabVideoSource::Notify(nsITimer*) {
-  Draw();
-  return NS_OK;
-}
-
 nsresult
 MediaEngineTabVideoSource::InitRunnable::Run()
 {
   if (mVideoSource->mWindowId != -1) {
     nsGlobalWindowOuter* globalWindow =
       nsGlobalWindowOuter::GetOuterWindowWithId(mVideoSource->mWindowId);
     if (!globalWindow) {
       // We can't access the window, just send a blacked out screen.
@@ -112,79 +94,84 @@ MediaEngineTabVideoSource::InitRunnable:
     rv = mVideoSource->mTabSource->GetTabToStream(getter_AddRefs(win));
     NS_ENSURE_SUCCESS(rv, rv);
     if (!win)
       return NS_OK;
 
     mVideoSource->mWindow = nsPIDOMWindowOuter::From(win);
     MOZ_ASSERT(mVideoSource->mWindow);
   }
+  mVideoSource->mTimer = NS_NewTimer();
   nsCOMPtr<nsIRunnable> start(new StartRunnable(mVideoSource));
   start->Run();
   return NS_OK;
 }
 
 nsresult
 MediaEngineTabVideoSource::DestroyRunnable::Run()
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   mVideoSource->mWindow = nullptr;
   mVideoSource->mTabSource = nullptr;
 
   return NS_OK;
 }
 
-void
-MediaEngineTabVideoSource::GetName(nsAString& aName) const
+nsString
+MediaEngineTabVideoSource::GetName() const
 {
-  aName.AssignLiteral(u"&getUserMedia.videoSource.tabShare;");
+  return NS_LITERAL_STRING(u"&getUserMedia.videoSource.tabShare;");
 }
 
-void
-MediaEngineTabVideoSource::GetUUID(nsACString& aUuid) const
+nsCString
+MediaEngineTabVideoSource::GetUUID() const
 {
-  aUuid.AssignLiteral("tab");
+  return NS_LITERAL_CSTRING("tab");
 }
 
 #define DEFAULT_TABSHARE_VIDEO_MAX_WIDTH 4096
 #define DEFAULT_TABSHARE_VIDEO_MAX_HEIGHT 4096
 #define DEFAULT_TABSHARE_VIDEO_FRAMERATE 30
 
 nsresult
 MediaEngineTabVideoSource::Allocate(const dom::MediaTrackConstraints& aConstraints,
                                     const MediaEnginePrefs& aPrefs,
                                     const nsString& aDeviceId,
                                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                                     AllocationHandle** aOutHandle,
                                     const char** aOutBadConstraint)
 {
+  AssertIsOnOwningThread();
+
   // windowId is not a proper constraint, so just read it.
   // It has no well-defined behavior in advanced, so ignore it there.
 
   mWindowId = aConstraints.mBrowserWindow.WasPassed() ?
               aConstraints.mBrowserWindow.Value() : -1;
   *aOutHandle = nullptr;
 
   {
-    MonitorAutoLock mon(mMonitor);
+    MutexAutoLock lock(mMutex);
     mState = kAllocated;
   }
 
-  return Restart(nullptr, aConstraints, aPrefs, aDeviceId, aOutBadConstraint);
+  return Reconfigure(nullptr, aConstraints, aPrefs, aDeviceId, aOutBadConstraint);
 }
 
 nsresult
-MediaEngineTabVideoSource::Restart(AllocationHandle* aHandle,
-                                   const dom::MediaTrackConstraints& aConstraints,
-                                   const mozilla::MediaEnginePrefs& aPrefs,
-                                   const nsString& aDeviceId,
-                                   const char** aOutBadConstraint)
+MediaEngineTabVideoSource::Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                                       const dom::MediaTrackConstraints& aConstraints,
+                                       const mozilla::MediaEnginePrefs& aPrefs,
+                                       const nsString& aDeviceId,
+                                       const char** aOutBadConstraint)
 {
+  AssertIsOnOwningThread();
   MOZ_ASSERT(!aHandle);
+  MOZ_ASSERT(mState != kReleased);
 
   // scrollWithPage is not proper a constraint, so just read it.
   // It has no well-defined behavior in advanced, so ignore it there.
 
   mScrollWithPage = aConstraints.mScrollWithPage.WasPassed() ?
                     aConstraints.mScrollWithPage.Value() : false;
 
   FlattenedConstraints c(aConstraints);
@@ -199,72 +186,98 @@ MediaEngineTabVideoSource::Restart(Alloc
     mViewportOffsetY = c.mViewportOffsetY.Get(0);
     mViewportWidth = c.mViewportWidth.Get(INT32_MAX);
     mViewportHeight = c.mViewportHeight.Get(INT32_MAX);
   }
   return NS_OK;
 }
 
 nsresult
-MediaEngineTabVideoSource::Deallocate(AllocationHandle* aHandle)
+MediaEngineTabVideoSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
 {
+  AssertIsOnOwningThread();
   MOZ_ASSERT(!aHandle);
+  MOZ_ASSERT(mState == kAllocated || mState == kStopped);
+
+  MOZ_ASSERT(mTrackID == TRACK_NONE, "Stop() must reset mTrackID before Deallocate()");
+
   NS_DispatchToMainThread(do_AddRef(new DestroyRunnable(this)));
 
   {
-    MonitorAutoLock mon(mMonitor);
+    MutexAutoLock lock(mMutex);
     mState = kReleased;
   }
+
   return NS_OK;
 }
 
 nsresult
-MediaEngineTabVideoSource::Start(SourceMediaStream* aStream, TrackID aID,
+MediaEngineTabVideoSource::Start(SourceMediaStream* aStream,
+                                 TrackID aTrackID,
                                  const PrincipalHandle& aPrincipalHandle)
 {
+  AssertIsOnOwningThread();
+  MOZ_ASSERT(mState == kAllocated);
+
+  MOZ_ASSERT(!mStream);
+  MOZ_ASSERT(mTrackID == TRACK_NONE);
+  MOZ_ASSERT(aStream);
+  MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
+
   nsCOMPtr<nsIRunnable> runnable;
-  if (!mWindow)
+  if (!mWindow) {
     runnable = new InitRunnable(this);
-  else
+  } else {
     runnable = new StartRunnable(this);
+  }
   NS_DispatchToMainThread(runnable);
-  aStream->AddTrack(aID, 0, new VideoSegment());
+  mStream = aStream;
+  mTrackID = aTrackID;
+  mStream->AddTrack(mTrackID, 0, new VideoSegment());
 
   {
-    MonitorAutoLock mon(mMonitor);
+    MutexAutoLock lock(mMutex);
     mState = kStarted;
   }
 
   return NS_OK;
 }
 
 void
-MediaEngineTabVideoSource::NotifyPull(MediaStreamGraph*,
-                                      SourceMediaStream* aSource,
-                                      TrackID aID, StreamTime aDesiredTime,
-                                      const PrincipalHandle& aPrincipalHandle)
+MediaEngineTabVideoSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                const RefPtr<SourceMediaStream>& aStream,
+                                TrackID aTrackID,
+                                StreamTime aDesiredTime,
+                                const PrincipalHandle& aPrincipalHandle)
 {
   VideoSegment segment;
-  MonitorAutoLock mon(mMonitor);
-  if (mState != kStarted) {
-    return;
+  RefPtr<layers::Image> image;
+
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == kReleased) {
+      // We end the track before setting the state to released.
+      return;
+    }
+    image = mImage;
   }
 
   // Note: we're not giving up mImage here
-  RefPtr<layers::SourceSurfaceImage> image = mImage;
-  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
-  if (delta > 0) {
-    // nullptr images are allowed
-    gfx::IntSize size = image ? image->GetSize() : IntSize(0, 0);
-    segment.AppendFrame(image.forget().downcast<layers::Image>(), delta, size,
-                        aPrincipalHandle);
-    // This can fail if either a) we haven't added the track yet, or b)
-    // we've removed or finished the track.
-    aSource->AppendToTrack(aID, &(segment));
+  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
+  if (delta <= 0) {
+    return;
   }
+
+  // nullptr images are allowed
+  gfx::IntSize size = image ? image->GetSize() : IntSize(0, 0);
+  segment.AppendFrame(image.forget(), delta, size,
+                      aPrincipalHandle);
+  // This can fail if either a) we haven't added the track yet, or b)
+  // we've removed or ended the track.
+  aStream->AppendToTrack(aTrackID, &(segment));
 }
 
 void
 MediaEngineTabVideoSource::Draw() {
   if (!mWindow && !mBlackedoutWindow) {
     return;
   }
 
@@ -282,17 +295,17 @@ MediaEngineTabVideoSource::Draw() {
     mViewportWidth = 640;
     mViewportHeight = 480;
   }
 
   IntSize size;
   {
     float pixelRatio;
     if (mWindow) {
-      pixelRatio = mWindow->GetDevicePixelRatio(CallerType::System);
+      pixelRatio = mWindow->GetDevicePixelRatio(dom::CallerType::System);
     } else {
       pixelRatio = 1.0f;
     }
     const int32_t deviceWidth = (int32_t)(pixelRatio * mViewportWidth);
     const int32_t deviceHeight = (int32_t)(pixelRatio * mViewportHeight);
 
     if ((deviceWidth <= mBufWidthMax) && (deviceHeight <= mBufHeightMax)) {
       size = IntSize(deviceWidth, deviceHeight);
@@ -363,39 +376,40 @@ MediaEngineTabVideoSource::Draw() {
 
   RefPtr<SourceSurface> surface = dt->Snapshot();
   if (!surface) {
     return;
   }
 
   RefPtr<layers::SourceSurfaceImage> image = new layers::SourceSurfaceImage(size, surface);
 
-  MonitorAutoLock mon(mMonitor);
+  MutexAutoLock lock(mMutex);
   mImage = image;
 }
 
 nsresult
-MediaEngineTabVideoSource::Stop(mozilla::SourceMediaStream* aSource,
-                                mozilla::TrackID aID)
+MediaEngineTabVideoSource::Stop(SourceMediaStream* aStream,
+                                TrackID aTrackID)
 {
+  AssertIsOnOwningThread();
+  MOZ_ASSERT(mState == kStarted);
+  MOZ_ASSERT(mStream == aStream);
+  MOZ_ASSERT(mTrackID == aTrackID);
+
   // If mBlackedoutWindow is true, we may be running
   // despite mWindow == nullptr.
   if (!mWindow && !mBlackedoutWindow) {
     return NS_OK;
   }
 
   NS_DispatchToMainThread(new StopRunnable(this));
 
   {
-    MonitorAutoLock mon(mMonitor);
+    MutexAutoLock lock(mMutex);
     mState = kStopped;
-    aSource->EndTrack(aID);
+    mStream->EndTrack(mTrackID);
+    mStream = nullptr;
+    mTrackID = TRACK_NONE;
   }
   return NS_OK;
 }
 
-bool
-MediaEngineTabVideoSource::IsFake()
-{
-  return false;
 }
-
-}
--- a/dom/media/webrtc/MediaEngineTabVideoSource.h
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.h
@@ -1,125 +1,141 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "nsIDOMEventListener.h"
 #include "MediaEngine.h"
 #include "ImageContainer.h"
 #include "nsITimer.h"
-#include "mozilla/Monitor.h"
+#include "mozilla/Mutex.h"
 #include "mozilla/UniquePtr.h"
 #include "nsITabSource.h"
 
 namespace mozilla {
 
-class MediaEngineTabVideoSource : public MediaEngineVideoSource, nsIDOMEventListener, nsITimerCallback {
-  public:
-    NS_DECL_THREADSAFE_ISUPPORTS
-    NS_DECL_NSIDOMEVENTLISTENER
-    NS_DECL_NSITIMERCALLBACK
-    MediaEngineTabVideoSource();
+class MediaEngineTabVideoSource : public MediaEngineSource
+{
+public:
+  MediaEngineTabVideoSource();
+
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
 
-    void GetName(nsAString&) const override;
-    void GetUUID(nsACString&) const override;
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return mState == kReleased;
+  }
 
-    bool GetScary() const override {
-      return true;
-    }
+  bool GetScary() const override
+  {
+    return true;
+  }
 
-    nsresult Allocate(const dom::MediaTrackConstraints &,
-                      const mozilla::MediaEnginePrefs&,
-                      const nsString& aDeviceId,
-                      const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
-                      AllocationHandle** aOutHandle,
-                      const char** aOutBadConstraint) override;
-    nsresult Deallocate(AllocationHandle* aHandle) override;
-    nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID, const mozilla::PrincipalHandle&) override;
-    void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, const mozilla::PrincipalHandle& aPrincipalHandle) override;
-    nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID) override;
-    nsresult Restart(AllocationHandle* aHandle,
-                     const dom::MediaTrackConstraints& aConstraints,
-                     const mozilla::MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId,
-                     const char** aOutBadConstraint) override;
-    bool IsFake() override;
-    dom::MediaSourceEnum GetMediaSource() const override {
-      return dom::MediaSourceEnum::Browser;
-    }
-    uint32_t GetBestFitnessDistance(
-      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-      const nsString& aDeviceId) const override
-    {
-      return 0;
-    }
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
+    return dom::MediaSourceEnum::Browser;
+  }
+
+  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
+                    const MediaEnginePrefs& aPrefs,
+                    const nsString& aDeviceId,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
+                    AllocationHandle** aOutHandle,
+                    const char** aOutBadConstraint) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult Start(SourceMediaStream*, TrackID, const PrincipalHandle&) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
+  nsresult Stop(SourceMediaStream*, TrackID) override;
+
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
 
-    nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-    {
-      return NS_ERROR_NOT_IMPLEMENTED;
-    }
+  uint32_t GetBestFitnessDistance(
+    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+    const nsString& aDeviceId) const override
+  {
+    return 0;
+  }
 
-    void Draw();
+  void Draw();
 
-    class StartRunnable : public Runnable {
-    public:
-      explicit StartRunnable(MediaEngineTabVideoSource *videoSource)
-        : Runnable("MediaEngineTabVideoSource::StartRunnable")
-        , mVideoSource(videoSource)
-      {}
-      NS_IMETHOD Run() override;
-      RefPtr<MediaEngineTabVideoSource> mVideoSource;
-    };
+  class StartRunnable : public Runnable {
+  public:
+    explicit StartRunnable(MediaEngineTabVideoSource *videoSource)
+      : Runnable("MediaEngineTabVideoSource::StartRunnable")
+      , mVideoSource(videoSource)
+    {}
+    NS_IMETHOD Run() override;
+    RefPtr<MediaEngineTabVideoSource> mVideoSource;
+  };
 
-    class StopRunnable : public Runnable {
-    public:
-      explicit StopRunnable(MediaEngineTabVideoSource *videoSource)
-        : Runnable("MediaEngineTabVideoSource::StopRunnable")
-        , mVideoSource(videoSource)
-      {}
-      NS_IMETHOD Run() override;
-      RefPtr<MediaEngineTabVideoSource> mVideoSource;
-    };
+  class StopRunnable : public Runnable {
+  public:
+    explicit StopRunnable(MediaEngineTabVideoSource *videoSource)
+      : Runnable("MediaEngineTabVideoSource::StopRunnable")
+      , mVideoSource(videoSource)
+    {}
+    NS_IMETHOD Run() override;
+    RefPtr<MediaEngineTabVideoSource> mVideoSource;
+  };
 
-    class InitRunnable : public Runnable {
-    public:
-      explicit InitRunnable(MediaEngineTabVideoSource *videoSource)
-        : Runnable("MediaEngineTabVideoSource::InitRunnable")
-        , mVideoSource(videoSource)
-      {}
-      NS_IMETHOD Run() override;
-      RefPtr<MediaEngineTabVideoSource> mVideoSource;
-    };
+  class InitRunnable : public Runnable {
+  public:
+    explicit InitRunnable(MediaEngineTabVideoSource *videoSource)
+      : Runnable("MediaEngineTabVideoSource::InitRunnable")
+      , mVideoSource(videoSource)
+    {}
+    NS_IMETHOD Run() override;
+    RefPtr<MediaEngineTabVideoSource> mVideoSource;
+  };
 
-    class DestroyRunnable : public Runnable {
-    public:
-      explicit DestroyRunnable(MediaEngineTabVideoSource* videoSource)
-        : Runnable("MediaEngineTabVideoSource::DestroyRunnable")
-        , mVideoSource(videoSource)
-      {}
-      NS_IMETHOD Run() override;
-      RefPtr<MediaEngineTabVideoSource> mVideoSource;
-    };
+  class DestroyRunnable : public Runnable {
+  public:
+    explicit DestroyRunnable(MediaEngineTabVideoSource* videoSource)
+      : Runnable("MediaEngineTabVideoSource::DestroyRunnable")
+      , mVideoSource(videoSource)
+    {}
+    NS_IMETHOD Run() override;
+    RefPtr<MediaEngineTabVideoSource> mVideoSource;
+  };
 
 protected:
-    ~MediaEngineTabVideoSource() {}
+  ~MediaEngineTabVideoSource() {}
 
 private:
-    int32_t mBufWidthMax;
-    int32_t mBufHeightMax;
-    int64_t mWindowId;
-    bool mScrollWithPage;
-    int32_t mViewportOffsetX;
-    int32_t mViewportOffsetY;
-    int32_t mViewportWidth;
-    int32_t mViewportHeight;
-    int32_t mTimePerFrame;
-    UniquePtr<unsigned char[]> mData;
-    size_t mDataSize;
-    nsCOMPtr<nsPIDOMWindowOuter> mWindow;
-    // If this is set, we will run despite mWindow == nullptr.
-    bool mBlackedoutWindow;
-    RefPtr<layers::SourceSurfaceImage> mImage;
-    nsCOMPtr<nsITimer> mTimer;
-    Monitor mMonitor;
-    nsCOMPtr<nsITabSource> mTabSource;
-  };
-}
+  int32_t mBufWidthMax = 0;
+  int32_t mBufHeightMax = 0;
+  int64_t mWindowId = 0;
+  bool mScrollWithPage = false;
+  int32_t mViewportOffsetX = 0;
+  int32_t mViewportOffsetY = 0;
+  int32_t mViewportWidth = 0;
+  int32_t mViewportHeight = 0;
+  int32_t mTimePerFrame = 0;
+  UniquePtr<unsigned char[]> mData;
+  size_t mDataSize = 0;
+  nsCOMPtr<nsPIDOMWindowOuter> mWindow;
+  // If this is set, we will run despite mWindow == nullptr.
+  bool mBlackedoutWindow = false;
+  // Current state of this source.
+  // Written on owning thread *and* under mMutex.
+  // Can be read on owning thread *or* under mMutex.
+  MediaEngineSourceState mState = kReleased;
+  // mStream and mTrackID are set in SetSource() to keep track of what to end
+  // in Deallocate().
+  // Owning thread only.
+  RefPtr<SourceMediaStream> mStream;
+  TrackID mTrackID = TRACK_NONE;
+  RefPtr<layers::SourceSurfaceImage> mImage;
+  nsCOMPtr<nsITimer> mTimer;
+  Mutex mMutex;
+  nsCOMPtr<nsITabSource> mTabSource;
+};
+
+} // namespace mozilla
--- a/dom/media/webrtc/MediaEngineWebRTC.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTC.cpp
@@ -1,38 +1,35 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set sw=2 ts=8 et ft=cpp : */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "MediaEngineWebRTC.h"
+
+#include "AllocationHandle.h"
+#include "CamerasChild.h"
+#include "CSFLog.h"
+#include "MediaEngineTabVideoSource.h"
+#include "MediaEngineRemoteVideoSource.h"
+#include "MediaTrackConstraints.h"
+#include "mozilla/Logging.h"
+#include "nsIComponentRegistrar.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
-
-#include "CSFLog.h"
+#include "nsITabSource.h"
 #include "prenv.h"
 
-#include "mozilla/Logging.h"
+#ifdef MOZ_WIDGET_ANDROID
+#include "AndroidBridge.h"
+#include "VideoEngine.h"
+#endif
 
 static mozilla::LazyLogModule sGetUserMediaLog("GetUserMedia");
-
-#include "MediaEngineWebRTC.h"
-#include "ImageContainer.h"
-#include "nsIComponentRegistrar.h"
-#include "MediaEngineTabVideoSource.h"
-#include "MediaEngineRemoteVideoSource.h"
-#include "CamerasChild.h"
-#include "nsITabSource.h"
-#include "MediaTrackConstraints.h"
-
-#ifdef MOZ_WIDGET_ANDROID
-#include "VideoEngine.h"
-#include "AndroidBridge.h"
-#endif
-
 #undef LOG
 #define LOG(args) MOZ_LOG(sGetUserMediaLog, mozilla::LogLevel::Debug, args)
 
 namespace mozilla {
 
 // statics from AudioInputCubeb
 nsTArray<int>* AudioInputCubeb::mDeviceIndexes;
 int AudioInputCubeb::mDefaultDevice = -1;
@@ -101,17 +98,17 @@ void AudioInputCubeb::UpdateDeviceList()
   LOG(("Cubeb default input device %d", mDefaultDevice));
   StaticMutexAutoLock lock(sMutex);
   // swap state
   cubeb_device_collection_destroy(cubebContext, &mDevices);
   mDevices = devices;
 }
 
 MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
-  : mMutex("mozilla::MediaEngineWebRTC"),
+  : mMutex("MediaEngineWebRTC::mMutex"),
     mAudioInput(nullptr),
     mFullDuplex(aPrefs.mFullDuplex),
     mDelayAgnostic(aPrefs.mDelayAgnostic),
     mExtendedFilter(aPrefs.mExtendedFilter),
     mHasTabVideoSource(false)
 {
   nsCOMPtr<nsIComponentRegistrar> compMgr;
   NS_GetComponentRegistrar(getter_AddRefs(compMgr));
@@ -127,235 +124,231 @@ MediaEngineWebRTC::MediaEngineWebRTC(Med
 void
 MediaEngineWebRTC::SetFakeDeviceChangeEvents()
 {
   camera::GetChildAndCall(
     &camera::CamerasChild::SetFakeDeviceChangeEvents);
 }
 
 void
-MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
-                                         nsTArray<RefPtr<MediaEngineVideoSource> >* aVSources)
+MediaEngineWebRTC::EnumerateDevices(dom::MediaSourceEnum aMediaSource,
+                                    nsTArray<RefPtr<MediaEngineSource> >* aSources)
 {
-  // We spawn threads to handle gUM runnables, so we must protect the member vars
-  MutexAutoLock lock(mMutex);
+  if (MediaEngineSource::IsVideo(aMediaSource)) {
+    // We spawn threads to handle gUM runnables, so we must protect the member vars
+    MutexAutoLock lock(mMutex);
 
-  mozilla::camera::CaptureEngine capEngine = mozilla::camera::InvalidEngine;
+    mozilla::camera::CaptureEngine capEngine = mozilla::camera::InvalidEngine;
 
 #ifdef MOZ_WIDGET_ANDROID
-  // get the JVM
-  JavaVM* jvm;
-  JNIEnv* const env = jni::GetEnvForThread();
-  MOZ_ALWAYS_TRUE(!env->GetJavaVM(&jvm));
-
-  if (!jvm || mozilla::camera::VideoEngine::SetAndroidObjects(jvm)) {
-    LOG(("VideoEngine::SetAndroidObjects Failed"));
-    return;
-  }
-#endif
-  bool scaryKind = false; // flag sources with cross-origin exploit potential
-
-  switch (aMediaSource) {
-    case dom::MediaSourceEnum::Window:
-      capEngine = mozilla::camera::WinEngine;
-      break;
-    case dom::MediaSourceEnum::Application:
-      capEngine = mozilla::camera::AppEngine;
-      break;
-    case dom::MediaSourceEnum::Screen:
-      capEngine = mozilla::camera::ScreenEngine;
-      scaryKind = true;
-      break;
-    case dom::MediaSourceEnum::Browser:
-      capEngine = mozilla::camera::BrowserEngine;
-      scaryKind = true;
-      break;
-    case dom::MediaSourceEnum::Camera:
-      capEngine = mozilla::camera::CameraEngine;
-      break;
-    default:
-      // BOOM
-      MOZ_CRASH("No valid video engine");
-      break;
-  }
+    // get the JVM
+    JavaVM* jvm;
+    JNIEnv* const env = jni::GetEnvForThread();
+    MOZ_ALWAYS_TRUE(!env->GetJavaVM(&jvm));
 
-  /**
-   * We still enumerate every time, in case a new device was plugged in since
-   * the last call. TODO: Verify that WebRTC actually does deal with hotplugging
-   * new devices (with or without new engine creation) and accordingly adjust.
-   * Enumeration is not neccessary if GIPS reports the same set of devices
-   * for a given instance of the engine. Likewise, if a device was plugged out,
-   * mVideoSources must be updated.
-   */
-  int num;
-  num = mozilla::camera::GetChildAndCall(
-    &mozilla::camera::CamerasChild::NumberOfCaptureDevices,
-    capEngine);
-
-  for (int i = 0; i < num; i++) {
-    char deviceName[MediaEngineSource::kMaxDeviceNameLength];
-    char uniqueId[MediaEngineSource::kMaxUniqueIdLength];
-    bool scarySource = false;
-
-    // paranoia
-    deviceName[0] = '\0';
-    uniqueId[0] = '\0';
-    int error;
-
-    error =  mozilla::camera::GetChildAndCall(
-      &mozilla::camera::CamerasChild::GetCaptureDevice,
-      capEngine,
-      i, deviceName,
-      sizeof(deviceName), uniqueId,
-      sizeof(uniqueId),
-      &scarySource);
-    if (error) {
-      LOG(("camera:GetCaptureDevice: Failed %d", error ));
-      continue;
-    }
-#ifdef DEBUG
-    LOG(("  Capture Device Index %d, Name %s", i, deviceName));
-
-    webrtc::CaptureCapability cap;
-    int numCaps = mozilla::camera::GetChildAndCall(
-      &mozilla::camera::CamerasChild::NumberOfCapabilities,
-      capEngine,
-      uniqueId);
-    LOG(("Number of Capabilities %d", numCaps));
-    for (int j = 0; j < numCaps; j++) {
-      if (mozilla::camera::GetChildAndCall(
-            &mozilla::camera::CamerasChild::GetCaptureCapability,
-            capEngine,
-            uniqueId,
-            j, cap) != 0) {
-       break;
-      }
-      LOG(("type=%d width=%d height=%d maxFPS=%d",
-           cap.rawType, cap.width, cap.height, cap.maxFPS ));
+    if (!jvm || mozilla::camera::VideoEngine::SetAndroidObjects(jvm)) {
+      LOG(("VideoEngine::SetAndroidObjects Failed"));
+      return;
     }
 #endif
+    bool scaryKind = false; // flag sources with cross-origin exploit potential
 
-    if (uniqueId[0] == '\0') {
-      // In case a device doesn't set uniqueId!
-      strncpy(uniqueId, deviceName, sizeof(uniqueId));
-      uniqueId[sizeof(uniqueId)-1] = '\0'; // strncpy isn't safe
+    switch (aMediaSource) {
+      case dom::MediaSourceEnum::Window:
+        capEngine = mozilla::camera::WinEngine;
+        break;
+      case dom::MediaSourceEnum::Application:
+        capEngine = mozilla::camera::AppEngine;
+        break;
+      case dom::MediaSourceEnum::Screen:
+        capEngine = mozilla::camera::ScreenEngine;
+        scaryKind = true;
+        break;
+      case dom::MediaSourceEnum::Browser:
+        capEngine = mozilla::camera::BrowserEngine;
+        scaryKind = true;
+        break;
+      case dom::MediaSourceEnum::Camera:
+        capEngine = mozilla::camera::CameraEngine;
+        break;
+      default:
+        MOZ_CRASH("No valid video engine");
+        break;
     }
 
-    RefPtr<MediaEngineVideoSource> vSource;
-    NS_ConvertUTF8toUTF16 uuid(uniqueId);
-    if (mVideoSources.Get(uuid, getter_AddRefs(vSource))) {
-      // We've already seen this device, just refresh and append.
-      static_cast<MediaEngineRemoteVideoSource*>(vSource.get())->Refresh(i);
-      aVSources->AppendElement(vSource.get());
-    } else {
-      vSource = new MediaEngineRemoteVideoSource(i, capEngine, aMediaSource,
-                                                 scaryKind || scarySource);
-      mVideoSources.Put(uuid, vSource); // Hashtable takes ownership.
-      aVSources->AppendElement(vSource);
+    /*
+     * We still enumerate every time, in case a new device was plugged in since
+     * the last call. TODO: Verify that WebRTC actually does deal with hotplugging
+     * new devices (with or without new engine creation) and accordingly adjust.
+     * Enumeration is not neccessary if GIPS reports the same set of devices
+     * for a given instance of the engine. Likewise, if a device was plugged out,
+     * mVideoSources must be updated.
+     */
+    int num;
+    num = mozilla::camera::GetChildAndCall(
+      &mozilla::camera::CamerasChild::NumberOfCaptureDevices,
+      capEngine);
+
+    for (int i = 0; i < num; i++) {
+      char deviceName[MediaEngineSource::kMaxDeviceNameLength];
+      char uniqueId[MediaEngineSource::kMaxUniqueIdLength];
+      bool scarySource = false;
+
+      // paranoia
+      deviceName[0] = '\0';
+      uniqueId[0] = '\0';
+      int error;
+
+      error =  mozilla::camera::GetChildAndCall(
+        &mozilla::camera::CamerasChild::GetCaptureDevice,
+        capEngine,
+        i, deviceName,
+        sizeof(deviceName), uniqueId,
+        sizeof(uniqueId),
+        &scarySource);
+      if (error) {
+        LOG(("camera:GetCaptureDevice: Failed %d", error ));
+        continue;
+      }
+#ifdef DEBUG
+      LOG(("  Capture Device Index %d, Name %s", i, deviceName));
+
+      webrtc::CaptureCapability cap;
+      int numCaps = mozilla::camera::GetChildAndCall(
+        &mozilla::camera::CamerasChild::NumberOfCapabilities,
+        capEngine,
+        uniqueId);
+      LOG(("Number of Capabilities %d", numCaps));
+      for (int j = 0; j < numCaps; j++) {
+        if (mozilla::camera::GetChildAndCall(
+              &mozilla::camera::CamerasChild::GetCaptureCapability,
+              capEngine,
+              uniqueId,
+              j, cap) != 0) {
+         break;
+        }
+        LOG(("type=%d width=%d height=%d maxFPS=%d",
+             cap.rawType, cap.width, cap.height, cap.maxFPS ));
+      }
+#endif
+
+      if (uniqueId[0] == '\0') {
+        // In case a device doesn't set uniqueId!
+        strncpy(uniqueId, deviceName, sizeof(uniqueId));
+        uniqueId[sizeof(uniqueId)-1] = '\0'; // strncpy isn't safe
+      }
+
+      NS_ConvertUTF8toUTF16 uuid(uniqueId);
+      RefPtr<MediaEngineSource> vSource = mVideoSources.Get(uuid);
+      if (vSource && vSource->RequiresSharing()) {
+        // We've already seen this shared device, just refresh and append.
+        static_cast<MediaEngineRemoteVideoSource*>(vSource.get())->Refresh(i);
+        aSources->AppendElement(vSource.get());
+      } else {
+        vSource = new MediaEngineRemoteVideoSource(i, capEngine, aMediaSource,
+                                                   scaryKind || scarySource);
+        mVideoSources.Put(uuid, vSource);
+        aSources->AppendElement(vSource);
+      }
     }
-  }
+
+    if (mHasTabVideoSource || dom::MediaSourceEnum::Browser == aMediaSource) {
+      aSources->AppendElement(new MediaEngineTabVideoSource());
+    }
+  } else {
+    // We spawn threads to handle gUM runnables, so we must protect the member vars
+    MutexAutoLock lock(mMutex);
+
+    if (aMediaSource == dom::MediaSourceEnum::AudioCapture) {
+      RefPtr<MediaEngineWebRTCAudioCaptureSource> audioCaptureSource =
+        new MediaEngineWebRTCAudioCaptureSource(nullptr);
+      aSources->AppendElement(audioCaptureSource);
+      return;
+    }
+
+    if (!mAudioInput) {
+      if (!SupportsDuplex()) {
+        return;
+      }
+      mAudioInput = new mozilla::AudioInputCubeb();
+    }
 
-  if (mHasTabVideoSource || dom::MediaSourceEnum::Browser == aMediaSource) {
-    aVSources->AppendElement(new MediaEngineTabVideoSource());
+    int nDevices = 0;
+    mAudioInput->GetNumOfRecordingDevices(nDevices);
+    int i;
+#if defined(MOZ_WIDGET_ANDROID)
+    i = 0; // Bug 1037025 - let the OS handle defaulting for now on android/b2g
+#else
+    // -1 is "default communications device" depending on OS in webrtc.org code
+    i = -1;
+#endif
+    for (; i < nDevices; i++) {
+      // We use constants here because GetRecordingDeviceName takes char[128].
+      char deviceName[128];
+      char uniqueId[128];
+      // paranoia; jingle doesn't bother with this
+      deviceName[0] = '\0';
+      uniqueId[0] = '\0';
+
+      int error = mAudioInput->GetRecordingDeviceName(i, deviceName, uniqueId);
+      if (error) {
+        LOG((" AudioInput::GetRecordingDeviceName: Failed %d", error));
+        continue;
+      }
+
+      if (uniqueId[0] == '\0') {
+        // Mac and Linux don't set uniqueId!
+        strcpy(uniqueId, deviceName); // safe given assert and initialization/error-check
+      }
+
+      NS_ConvertUTF8toUTF16 uuid(uniqueId);
+      RefPtr<MediaEngineSource> aSource = mAudioSources.Get(uuid);
+      if (aSource && aSource->RequiresSharing()) {
+        // We've already seen this device, just append.
+        aSources->AppendElement(aSource.get());
+      } else {
+        aSource = new MediaEngineWebRTCMicrophoneSource(
+            new mozilla::AudioInputCubeb(i),
+            i, deviceName, uniqueId,
+            mDelayAgnostic, mExtendedFilter);
+        mAudioSources.Put(uuid, aSource); // Hashtable takes ownership.
+        aSources->AppendElement(aSource);
+      }
+    }
   }
 }
 
 bool
 MediaEngineWebRTC::SupportsDuplex()
 {
   return mFullDuplex;
 }
 
 void
-MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
-                                         nsTArray<RefPtr<MediaEngineAudioSource> >* aASources)
-{
-  ScopedCustomReleasePtr<webrtc::VoEBase> ptrVoEBase;
-  // We spawn threads to handle gUM runnables, so we must protect the member vars
-  MutexAutoLock lock(mMutex);
-
-  if (aMediaSource == dom::MediaSourceEnum::AudioCapture) {
-    RefPtr<MediaEngineWebRTCAudioCaptureSource> audioCaptureSource =
-      new MediaEngineWebRTCAudioCaptureSource(nullptr);
-    aASources->AppendElement(audioCaptureSource);
-    return;
-  }
-
-  if (!mAudioInput) {
-    if (!SupportsDuplex()) {
-      return;
-    }
-    mAudioInput = new mozilla::AudioInputCubeb();
-  }
-
-  int nDevices = 0;
-  mAudioInput->GetNumOfRecordingDevices(nDevices);
-  int i;
-#if defined(MOZ_WIDGET_ANDROID)
-  i = 0; // Bug 1037025 - let the OS handle defaulting for now on android/b2g
-#else
-  // -1 is "default communications device" depending on OS in webrtc.org code
-  i = -1;
-#endif
-  for (; i < nDevices; i++) {
-    // We use constants here because GetRecordingDeviceName takes char[128].
-    char deviceName[128];
-    char uniqueId[128];
-    // paranoia; jingle doesn't bother with this
-    deviceName[0] = '\0';
-    uniqueId[0] = '\0';
-
-    int error = mAudioInput->GetRecordingDeviceName(i, deviceName, uniqueId);
-    if (error) {
-      LOG((" AudioInput::GetRecordingDeviceName: Failed %d", error));
-      continue;
-    }
-
-    if (uniqueId[0] == '\0') {
-      // Mac and Linux don't set uniqueId!
-      strcpy(uniqueId, deviceName); // safe given assert and initialization/error-check
-    }
-
-    RefPtr<MediaEngineAudioSource> aSource;
-    NS_ConvertUTF8toUTF16 uuid(uniqueId);
-    if (mAudioSources.Get(uuid, getter_AddRefs(aSource))) {
-      // We've already seen this device, just append.
-      aASources->AppendElement(aSource.get());
-    } else {
-      aSource = new MediaEngineWebRTCMicrophoneSource(new mozilla::AudioInputCubeb(i),
-                                                      i, deviceName, uniqueId,
-                                                      mDelayAgnostic, mExtendedFilter);
-      mAudioSources.Put(uuid, aSource); // Hashtable takes ownership.
-      aASources->AppendElement(aSource);
-    }
-  }
-}
-
-void
 MediaEngineWebRTC::Shutdown()
 {
   // This is likely paranoia
   MutexAutoLock lock(mMutex);
 
   if (camera::GetCamerasChildIfExists()) {
     camera::GetChildAndCall(
       &camera::CamerasChild::RemoveDeviceChangeCallback, this);
   }
 
   LOG(("%s", __FUNCTION__));
   // Shutdown all the sources, since we may have dangling references to the
   // sources in nsDOMUserMediaStreams waiting for GC/CC
   for (auto iter = mVideoSources.Iter(); !iter.Done(); iter.Next()) {
-    MediaEngineVideoSource* source = iter.UserData();
+    MediaEngineSource* source = iter.UserData();
     if (source) {
       source->Shutdown();
     }
   }
   for (auto iter = mAudioSources.Iter(); !iter.Done(); iter.Next()) {
-    MediaEngineAudioSource* source = iter.UserData();
+    MediaEngineSource* source = iter.UserData();
     if (source) {
       source->Shutdown();
     }
   }
   mVideoSources.Clear();
   mAudioSources.Clear();
 
   mozilla::camera::Shutdown();
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -1,154 +1,141 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MEDIAENGINEWEBRTC_H_
 #define MEDIAENGINEWEBRTC_H_
 
-#include "prcvar.h"
-#include "prthread.h"
-#include "nsIThread.h"
-#include "nsIRunnable.h"
-
+#include "AudioPacketizer.h"
+#include "AudioSegment.h"
+#include "CamerasChild.h"
+#include "cubeb/cubeb.h"
+#include "CubebUtils.h"
+#include "DOMMediaStream.h"
+#include "ipc/IPCMessageUtils.h"
+#include "MediaEngine.h"
+#include "MediaEnginePrefs.h"
+#include "MediaEngineSource.h"
+#include "MediaEngineWrapper.h"
+#include "MediaStreamGraph.h"
 #include "mozilla/dom/File.h"
+#include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "mozilla/Mutex.h"
+#include "mozilla/Sprintf.h"
 #include "mozilla/StaticMutex.h"
-#include "mozilla/Monitor.h"
-#include "mozilla/Sprintf.h"
 #include "mozilla/UniquePtr.h"
 #include "nsAutoPtr.h"
+#include "nsComponentManagerUtils.h"
 #include "nsCOMPtr.h"
-#include "nsThreadUtils.h"
-#include "DOMMediaStream.h"
 #include "nsDirectoryServiceDefs.h"
-#include "nsComponentManagerUtils.h"
+#include "nsIThread.h"
+#include "nsIRunnable.h"
 #include "nsRefPtrHashtable.h"
-
-#include "ipc/IPCMessageUtils.h"
-#include "VideoUtils.h"
-#include "MediaEngineCameraVideoSource.h"
+#include "nsThreadUtils.h"
+#include "NullTransport.h"
+#include "prcvar.h"
+#include "prthread.h"
+#include "StreamTracks.h"
 #include "VideoSegment.h"
-#include "AudioSegment.h"
-#include "StreamTracks.h"
-#include "MediaStreamGraph.h"
-#include "cubeb/cubeb.h"
-#include "CubebUtils.h"
-#include "AudioPacketizer.h"
-
-#include "MediaEngineWrapper.h"
-#include "mozilla/dom/MediaStreamTrackBinding.h"
-#include "CamerasChild.h"
+#include "VideoUtils.h"
 
 // WebRTC library includes follow
 // Audio Engine
 #include "webrtc/voice_engine/include/voe_base.h"
 #include "webrtc/voice_engine/include/voe_codec.h"
 #include "webrtc/voice_engine/include/voe_network.h"
 #include "webrtc/voice_engine/include/voe_audio_processing.h"
 #include "webrtc/voice_engine/include/voe_volume_control.h"
 #include "webrtc/voice_engine/include/voe_external_media.h"
 #include "webrtc/voice_engine/include/voe_audio_processing.h"
 #include "webrtc/modules/audio_device/include/audio_device.h"
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
-
 // Video Engine
 // conflicts with #include of scoped_ptr.h
 #undef FF
-
-// WebRTC imports
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 
-#include "NullTransport.h"
-
 namespace mozilla {
 
-class MediaEngineWebRTCAudioCaptureSource : public MediaEngineAudioSource
+class MediaEngineWebRTCMicrophoneSource;
+
+class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
 {
 public:
-  NS_DECL_THREADSAFE_ISUPPORTS
-
   explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
-    : MediaEngineAudioSource(kReleased)
   {
   }
-  void GetName(nsAString& aName) const override;
-  void GetUUID(nsACString& aUUID) const override;
-  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
-                    const MediaEnginePrefs& aPrefs,
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return false;
+  }
+  nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
+                    const MediaEnginePrefs& aPrefs,
                     const nsString& aDeviceId,
-                    const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override
   {
     // Nothing to do here, everything is managed in MediaManager.cpp
     *aOutHandle = nullptr;
     return NS_OK;
   }
-  nsresult Deallocate(AllocationHandle* aHandle) override
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
   {
     // Nothing to do here, everything is managed in MediaManager.cpp
     MOZ_ASSERT(!aHandle);
     return NS_OK;
   }
-  nsresult Start(SourceMediaStream* aMediaStream,
-                 TrackID aId,
+  nsresult Start(SourceMediaStream* aStream,
+                 TrackID aTrackID,
                  const PrincipalHandle& aPrincipalHandle) override;
-  nsresult Stop(SourceMediaStream* aMediaStream, TrackID aId) override;
-  nsresult Restart(AllocationHandle* aHandle,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const nsString& aDeviceId,
-                   const char** aOutBadConstraint) override;
-  void NotifyOutputData(MediaStreamGraph* aGraph,
-                        AudioDataValue* aBuffer, size_t aFrames,
-                        TrackRate aRate, uint32_t aChannels) override
-  {}
-  void DeviceChanged() override
-  {}
-  void NotifyInputData(MediaStreamGraph* aGraph,
-                       const AudioDataValue* aBuffer, size_t aFrames,
-                       TrackRate aRate, uint32_t aChannels) override
-  {}
-  void NotifyPull(MediaStreamGraph* aGraph,
-                  SourceMediaStream* aSource,
-                  TrackID aID,
-                  StreamTime aDesiredTime,
-                  const PrincipalHandle& aPrincipalHandle) override
+  nsresult Stop(SourceMediaStream* aStream, TrackID aTrackID) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
+
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override
   {
     // The AudioCapture setup code in MediaManager creates a dummy
     // SourceMediaStream that is not actually exposed to content.
     // We append null data here just to keep the MediaStreamGraph happy.
-    StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
+    StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
     if (delta > 0) {
       AudioSegment segment;
       segment.AppendNullData(delta);
-      aSource->AppendToTrack(aID, &segment);
+      aStream->AppendToTrack(aTrackID, &segment);
     }
   }
+
   dom::MediaSourceEnum GetMediaSource() const override
   {
     return dom::MediaSourceEnum::AudioCapture;
   }
-  bool IsFake() override
-  {
-    return false;
-  }
+
   nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
   {
     return NS_ERROR_NOT_IMPLEMENTED;
   }
+
   uint32_t GetBestFitnessDistance(
     const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     const nsString& aDeviceId) const override;
 
 protected:
-  virtual ~MediaEngineWebRTCAudioCaptureSource() {}
-  nsCString mUUID;
+  virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
 };
 
 // Small subset of VoEHardware
 class AudioInput
 {
 public:
   AudioInput() = default;
   // Threadsafe because it's referenced from an MicrophoneSource, which can
@@ -163,17 +151,17 @@ public:
   virtual int GetMaxAvailableChannels(uint32_t& aChannels) = 0;
   virtual void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener) = 0;
   virtual void StopRecording(SourceMediaStream *aStream) = 0;
   virtual int SetRecordingDevice(int aIndex) = 0;
   virtual void SetUserChannelCount(uint32_t aChannels) = 0;
 
 protected:
   // Protected destructor, to discourage deletion outside of Release():
-  virtual ~AudioInput() {}
+  virtual ~AudioInput() = default;
 };
 
 class AudioInputCubeb final : public AudioInput
 {
 public:
   explicit AudioInputCubeb(int aIndex = 0) :
     AudioInput(), mSelectedDevice(aIndex), mInUseCount(0)
   {
@@ -371,203 +359,250 @@ private:
 
 class WebRTCAudioDataListener : public AudioDataListener
 {
 protected:
   // Protected destructor, to discourage deletion outside of Release():
   virtual ~WebRTCAudioDataListener() {}
 
 public:
-  explicit WebRTCAudioDataListener(MediaEngineAudioSource* aAudioSource)
-    : mMutex("WebRTCAudioDataListener")
+  explicit WebRTCAudioDataListener(MediaEngineWebRTCMicrophoneSource* aAudioSource)
+    : mMutex("WebRTCAudioDataListener::mMutex")
     , mAudioSource(aAudioSource)
   {}
 
   // AudioDataListenerInterface methods
-  virtual void NotifyOutputData(MediaStreamGraph* aGraph,
-                                AudioDataValue* aBuffer, size_t aFrames,
-                                TrackRate aRate, uint32_t aChannels) override
-  {
-    MutexAutoLock lock(mMutex);
-    if (mAudioSource) {
-      mAudioSource->NotifyOutputData(aGraph, aBuffer, aFrames, aRate, aChannels);
-    }
-  }
-  virtual void NotifyInputData(MediaStreamGraph* aGraph,
-                               const AudioDataValue* aBuffer, size_t aFrames,
-                               TrackRate aRate, uint32_t aChannels) override
-  {
-    MutexAutoLock lock(mMutex);
-    if (mAudioSource) {
-      mAudioSource->NotifyInputData(aGraph, aBuffer, aFrames, aRate, aChannels);
-    }
-  }
-  virtual void DeviceChanged() override
-  {
-    MutexAutoLock lock(mMutex);
-    if (mAudioSource) {
-      mAudioSource->DeviceChanged();
-    }
-  }
+  void NotifyOutputData(MediaStreamGraph* aGraph,
+                        AudioDataValue* aBuffer,
+                        size_t aFrames,
+                        TrackRate aRate,
+                        uint32_t aChannels) override;
 
-  void Shutdown()
-  {
-    MutexAutoLock lock(mMutex);
-    mAudioSource = nullptr;
-  }
+  void NotifyInputData(MediaStreamGraph* aGraph,
+                       const AudioDataValue* aBuffer,
+                       size_t aFrames,
+                       TrackRate aRate,
+                       uint32_t aChannels) override;
+
+  void DeviceChanged() override;
+
+  void Shutdown();
 
 private:
   Mutex mMutex;
-  RefPtr<MediaEngineAudioSource> mAudioSource;
+  RefPtr<MediaEngineWebRTCMicrophoneSource> mAudioSource;
 };
 
-class MediaEngineWebRTCMicrophoneSource : public MediaEngineAudioSource
+class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource,
+                                          public AudioDataListenerInterface
 {
-  typedef MediaEngineAudioSource Super;
 public:
   MediaEngineWebRTCMicrophoneSource(mozilla::AudioInput* aAudioInput,
                                     int aIndex,
                                     const char* name,
                                     const char* uuid,
                                     bool aDelayAgnostic,
                                     bool aExtendedFilter);
 
-  void GetName(nsAString& aName) const override;
-  void GetUUID(nsACString& aUUID) const override;
+  bool RequiresSharing() const override
+  {
+    return true;
+  }
+
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
 
-  nsresult Deallocate(AllocationHandle* aHandle) override;
+  bool IsAvailable() const override
+  {
+    AssertIsOnOwningThread();
+    return mState == kReleased;
+  }
+
+  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
+                    const MediaEnginePrefs& aPrefs,
+                    const nsString& aDeviceId,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
+                    AllocationHandle** aOutHandle,
+                    const char** aOutBadConstraint) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Start(SourceMediaStream* aStream,
-                 TrackID aID,
+                 TrackID aTrackID,
                  const PrincipalHandle& aPrincipalHandle) override;
-  nsresult Stop(SourceMediaStream* aSource, TrackID aID) override;
-  nsresult Restart(AllocationHandle* aHandle,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   const MediaEnginePrefs &aPrefs,
-                   const nsString& aDeviceId,
-                   const char** aOutBadConstraint) override;
+  nsresult Stop(SourceMediaStream* aStream, TrackID aTrackID) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
 
-  void NotifyPull(MediaStreamGraph* aGraph,
-                  SourceMediaStream* aSource,
-                  TrackID aId,
-                  StreamTime aDesiredTime,
-                  const PrincipalHandle& aPrincipalHandle) override;
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
 
   // AudioDataListenerInterface methods
   void NotifyOutputData(MediaStreamGraph* aGraph,
                         AudioDataValue* aBuffer, size_t aFrames,
                         TrackRate aRate, uint32_t aChannels) override;
   void NotifyInputData(MediaStreamGraph* aGraph,
                        const AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override;
 
   void DeviceChanged() override;
 
-  bool IsFake() override {
-    return false;
-  }
-
-  dom::MediaSourceEnum GetMediaSource() const override {
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
     return dom::MediaSourceEnum::Microphone;
   }
 
   nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
   {
     return NS_ERROR_NOT_IMPLEMENTED;
   }
 
   uint32_t GetBestFitnessDistance(
-      const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-      const nsString& aDeviceId) const override;
+    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+    const nsString& aDeviceId) const override;
 
   void Shutdown() override;
 
-  NS_DECL_THREADSAFE_ISUPPORTS
-
 protected:
   ~MediaEngineWebRTCMicrophoneSource() {}
 
 private:
-  nsresult
-  UpdateSingleSource(const AllocationHandle* aHandle,
-                     const NormalizedConstraints& aNetConstraints,
-                     const NormalizedConstraints& aNewConstraint,
-                     const MediaEnginePrefs& aPrefs,
-                     const nsString& aDeviceId,
-                     const char** aOutBadConstraint) override;
+  /**
+   * Representation of data tied to an AllocationHandle rather than to the source.
+   */
+  struct Allocation {
+    Allocation() = delete;
+    explicit Allocation(const RefPtr<AllocationHandle>& aHandle);
+    ~Allocation();
+
+    const RefPtr<AllocationHandle> mHandle;
+    RefPtr<SourceMediaStream> mStream;
+    TrackID mTrackID = TRACK_NONE;
+    PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
+  };
+
+  /**
+   * Used with nsTArray<Allocation>::IndexOf to locate an Allocation by a handle.
+   */
+  class AllocationHandleComparator {
+  public:
+    bool Equals(const Allocation& aAllocation,
+                const RefPtr<const AllocationHandle>& aHandle) const
+    {
+      return aHandle == aAllocation.mHandle;
+    }
+  };
+
+  /**
+   * Reevaluates the aggregated constraints of all allocations and restarts the
+   * underlying device if necessary.
+   *
+   * If the given AllocationHandle was already registered, its constraints will
+   * be updated before reevaluation. If not, they will be added before
+   * reevaluation.
+   */
+  nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
+                                const NormalizedConstraints* aConstraintsUpdate,
+                                const MediaEnginePrefs& aPrefs,
+                                const nsString& aDeviceId,
+                                const char** aOutBadConstraint);
+
+  /**
+   * Updates the underlying (single) device with the aggregated constraints
+   * aNetConstraints. If the chosen settings for the device changes based on
+   * these new constraints, and capture is active, the device will be restarted.
+   */
+  nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
+                              const NormalizedConstraints& aNetConstraints,
+                              const MediaEnginePrefs& aPrefs,
+                              const nsString& aDeviceId,
+                              const char** aOutBadConstraint);
 
 
   void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
   void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
   void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
 
   void SetLastPrefs(const MediaEnginePrefs& aPrefs);
 
   // These allocate/configure and release the channel
   bool AllocChannel();
   void FreeChannel();
+
   template<typename T>
   void InsertInGraph(const T* aBuffer,
                      size_t aFrames,
                      uint32_t aChannels);
 
   void PacketizeAndProcess(MediaStreamGraph* aGraph,
                            const AudioDataValue* aBuffer,
                            size_t aFrames,
                            TrackRate aRate,
                            uint32_t aChannels);
 
 
   // This is true when all processing is disabled, we can skip
   // packetization, resampling and other processing passes.
-  bool PassThrough() {
-    return mSkipProcessing;
-  }
-  void SetPassThrough(bool aPassThrough) {
-    mSkipProcessing = aPassThrough;
-  }
+  // Graph thread only.
+  bool PassThrough() const;
+
+  // Graph thread only.
+  void SetPassThrough(bool aPassThrough);
 
   RefPtr<mozilla::AudioInput> mAudioInput;
   RefPtr<WebRTCAudioDataListener> mListener;
 
   // Note: shared across all microphone sources
   static int sChannelsOpen;
 
   const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;
 
   // accessed from the GraphDriver thread except for deletion.
   nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
   nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;
 
-  // mMonitor protects mSources[] and mPrinicpalIds[] access/changes, and
-  // transitions of mState from kStarted to kStopped (which are combined with
-  // EndTrack()). mSources[] and mPrincipalHandles[] are accessed from webrtc
-  // threads.
-  Monitor mMonitor;
-  nsTArray<RefPtr<SourceMediaStream>> mSources;
-  nsTArray<PrincipalHandle> mPrincipalHandles; // Maps to mSources.
+  // mMutex protects some of our members off the owning thread.
+  Mutex mMutex;
+
+  // We append an allocation in Allocate() and remove it in Deallocate().
+  // Both the array and the Allocation members are modified under mMutex on
+  // the owning thread. Accessed under one of the two.
+  nsTArray<Allocation> mAllocations;
+
+  // Current state of the shared resource for this source.
+  // Set under mMutex on the owning thread. Accessed under one of the two.
+  MediaEngineSourceState mState = kReleased;
 
   int mCapIndex;
   bool mDelayAgnostic;
   bool mExtendedFilter;
-  MOZ_INIT_OUTSIDE_CTOR TrackID mTrackID;
   bool mStarted;
 
-  nsString mDeviceName;
-  nsCString mDeviceUUID;
+  const nsString mDeviceName;
+  const nsCString mDeviceUUID;
+
+  // The current settings for the underlying device.
+  // Member access is main thread only after construction.
+  const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
 
   uint64_t mTotalFrames;
   uint64_t mLastLogFrames;
 
   // mSkipProcessing is true if none of the processing passes are enabled,
   // because of prefs or constraints. This allows simply copying the audio into
   // the MSG, skipping resampling and the whole webrtc.org code.
   // This is read and written to only on the MSG thread.
   bool mSkipProcessing;
 
   // To only update microphone when needed, we keep track of previous settings.
+  // Owning thread only.
   MediaEnginePrefs mLastPrefs;
 
   // Stores the mixed audio output for the reverse-stream of the AEC.
   AlignedFloatBuffer mOutputBuffer;
 
   AlignedFloatBuffer mInputBuffer;
   AlignedFloatBuffer mDeinterleavedBuffer;
   AlignedFloatBuffer mInputDownmixBuffer;
@@ -583,34 +618,32 @@ public:
 
   // Clients should ensure to clean-up sources video/audio sources
   // before invoking Shutdown on this class.
   void Shutdown() override;
 
   // Returns whether the host supports duplex audio stream.
   bool SupportsDuplex();
 
-  void EnumerateVideoDevices(dom::MediaSourceEnum,
-                             nsTArray<RefPtr<MediaEngineVideoSource>>*) override;
-  void EnumerateAudioDevices(dom::MediaSourceEnum,
-                             nsTArray<RefPtr<MediaEngineAudioSource>>*) override;
+  void EnumerateDevices(dom::MediaSourceEnum,
+                        nsTArray<RefPtr<MediaEngineSource>>*) override;
 private:
-  ~MediaEngineWebRTC() {}
+  ~MediaEngineWebRTC() = default;
 
   nsCOMPtr<nsIThread> mThread;
 
   // gUM runnables can e.g. Enumerate from multiple threads
   Mutex mMutex;
   RefPtr<mozilla::AudioInput> mAudioInput;
   bool mFullDuplex;
   bool mDelayAgnostic;
   bool mExtendedFilter;
   bool mHasTabVideoSource;
 
   // Store devices we've already seen in a hashtable for quick return.
   // Maps UUID to MediaEngineSource (one set for audio, one for video).
-  nsRefPtrHashtable<nsStringHashKey, MediaEngineVideoSource> mVideoSources;
-  nsRefPtrHashtable<nsStringHashKey, MediaEngineAudioSource> mAudioSources;
+  nsRefPtrHashtable<nsStringHashKey, MediaEngineSource> mVideoSources;
+  nsRefPtrHashtable<nsStringHashKey, MediaEngineSource> mAudioSources;
 };
 
 }
 
 #endif /* NSMEDIAENGINEWEBRTC_H_ */
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -1,22 +1,26 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaEngineWebRTC.h"
+
 #include <stdio.h>
 #include <algorithm>
+
+#include "AllocationHandle.h"
+#include "AudioConverter.h"
+#include "MediaManager.h"
+#include "MediaStreamGraphImpl.h"
+#include "MediaTrackConstraints.h"
 #include "mozilla/Assertions.h"
-#include "MediaTrackConstraints.h"
 #include "mtransport/runnable_utils.h"
 #include "nsAutoPtr.h"
-#include "AudioConverter.h"
-#include "MediaStreamGraphImpl.h"
 
 // scoped_ptr.h uses FF
 #ifdef FF
 #undef FF
 #endif
 #include "webrtc/modules/audio_device/opensl/single_rw_fifo.h"
 #include "webrtc/voice_engine/voice_engine_defines.h"
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
@@ -32,75 +36,125 @@ using namespace webrtc;
 static_assert(!(MAX_AEC_FIFO_DEPTH % 10), "Invalid MAX_AEC_FIFO_DEPTH");
 
 namespace mozilla {
 
 #ifdef LOG
 #undef LOG
 #endif
 
-extern LogModule* GetMediaManagerLog();
+LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOG_FRAMES(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
 
 LogModule* AudioLogModule() {
   static mozilla::LazyLogModule log("AudioLatency");
   return static_cast<LogModule*>(log);
 }
 
+void
+WebRTCAudioDataListener::NotifyOutputData(MediaStreamGraph* aGraph,
+                                          AudioDataValue* aBuffer,
+                                          size_t aFrames,
+                                          TrackRate aRate,
+                                          uint32_t aChannels)
+{
+  MutexAutoLock lock(mMutex);
+  if (mAudioSource) {
+    mAudioSource->NotifyOutputData(aGraph, aBuffer, aFrames, aRate, aChannels);
+  }
+}
+
+void
+WebRTCAudioDataListener::NotifyInputData(MediaStreamGraph* aGraph,
+                                         const AudioDataValue* aBuffer,
+                                         size_t aFrames,
+                                         TrackRate aRate,
+                                         uint32_t aChannels)
+{
+  MutexAutoLock lock(mMutex);
+  if (mAudioSource) {
+    mAudioSource->NotifyInputData(aGraph, aBuffer, aFrames, aRate, aChannels);
+  }
+}
+
+void
+WebRTCAudioDataListener::DeviceChanged()
+{
+  MutexAutoLock lock(mMutex);
+  if (mAudioSource) {
+    mAudioSource->DeviceChanged();
+  }
+}
+
+void
+WebRTCAudioDataListener::Shutdown()
+{
+  MutexAutoLock lock(mMutex);
+  mAudioSource = nullptr;
+}
+
 /**
- * Webrtc microphone source source.
+ * WebRTC Microphone MediaEngineSource.
  */
-NS_IMPL_ISUPPORTS0(MediaEngineWebRTCMicrophoneSource)
-NS_IMPL_ISUPPORTS0(MediaEngineWebRTCAudioCaptureSource)
+int MediaEngineWebRTCMicrophoneSource::sChannelsOpen = 0;
 
-int MediaEngineWebRTCMicrophoneSource::sChannelsOpen = 0;
+MediaEngineWebRTCMicrophoneSource::Allocation::Allocation(
+    const RefPtr<AllocationHandle>& aHandle)
+  : mHandle(aHandle)
+{}
+
+MediaEngineWebRTCMicrophoneSource::Allocation::~Allocation() = default;
 
 MediaEngineWebRTCMicrophoneSource::MediaEngineWebRTCMicrophoneSource(
     mozilla::AudioInput* aAudioInput,
     int aIndex,
-    const char* name,
-    const char* uuid,
+    const char* aDeviceName,
+    const char* aDeviceUUID,
     bool aDelayAgnostic,
     bool aExtendedFilter)
-  : MediaEngineAudioSource(kReleased)
-  , mAudioInput(aAudioInput)
+  : mAudioInput(aAudioInput)
   , mAudioProcessing(AudioProcessing::Create())
-  , mMonitor("WebRTCMic.Monitor")
+  , mMutex("WebRTCMic::Mutex")
   , mCapIndex(aIndex)
   , mDelayAgnostic(aDelayAgnostic)
   , mExtendedFilter(aExtendedFilter)
-  , mTrackID(TRACK_NONE)
   , mStarted(false)
+  , mDeviceName(NS_ConvertUTF8toUTF16(aDeviceName))
+  , mDeviceUUID(aDeviceUUID)
+  , mSettings(
+      new nsMainThreadPtrHolder<media::Refcountable<dom::MediaTrackSettings>>(
+        "MediaEngineWebRTCMicrophoneSource::mSettings",
+        new media::Refcountable<dom::MediaTrackSettings>(),
+        // Non-strict means it won't assert main thread for us.
+        // It would be great if it did but we're already on the media thread.
+        /* aStrict = */ false))
   , mTotalFrames(0)
   , mLastLogFrames(0)
   , mSkipProcessing(false)
   , mInputDownmixBuffer(MAX_SAMPLING_FREQ * MAX_CHANNELS / 100)
 {
   MOZ_ASSERT(aAudioInput);
-  mDeviceName.Assign(NS_ConvertUTF8toUTF16(name));
-  mDeviceUUID.Assign(uuid);
-  mListener = new mozilla::WebRTCAudioDataListener(this);
   mSettings->mEchoCancellation.Construct(0);
   mSettings->mAutoGainControl.Construct(0);
   mSettings->mNoiseSuppression.Construct(0);
   mSettings->mChannelCount.Construct(0);
   // We'll init lazily as needed
 }
 
-void
-MediaEngineWebRTCMicrophoneSource::GetName(nsAString& aName) const
+nsString
+MediaEngineWebRTCMicrophoneSource::GetName() const
 {
-  aName.Assign(mDeviceName);
+  return mDeviceName;
 }
 
-void
-MediaEngineWebRTCMicrophoneSource::GetUUID(nsACString& aUUID) const
+nsCString
+MediaEngineWebRTCMicrophoneSource::GetUUID() const
 {
-  aUUID.Assign(mDeviceUUID);
+  return mDeviceUUID;
 }
 
 // GetBestFitnessDistance returns the best distance the capture device can offer
 // as a whole, given an accumulated number of ConstraintSets.
 // Ideal values are considered in the first ConstraintSet only.
 // Plain values are treated as Ideal in the first ConstraintSet.
 // Plain values are treated as Exact in subsequent ConstraintSets.
 // Infinity = UINT32_MAX e.g. device cannot satisfy accumulated ConstraintSets.
@@ -108,28 +162,74 @@ MediaEngineWebRTCMicrophoneSource::GetUU
 
 uint32_t MediaEngineWebRTCMicrophoneSource::GetBestFitnessDistance(
     const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
     const nsString& aDeviceId) const
 {
   uint32_t distance = 0;
 
   for (const auto* cs : aConstraintSets) {
-    distance = GetMinimumFitnessDistance(*cs, aDeviceId);
+    distance = MediaConstraintsHelper::GetMinimumFitnessDistance(*cs, aDeviceId);
     break; // distance is read from first entry only
   }
   return distance;
 }
 
 nsresult
-MediaEngineWebRTCMicrophoneSource::Restart(AllocationHandle* aHandle,
-                                           const dom::MediaTrackConstraints& aConstraints,
-                                           const MediaEnginePrefs &aPrefs,
-                                           const nsString& aDeviceId,
-                                           const char** aOutBadConstraint)
+MediaEngineWebRTCMicrophoneSource::ReevaluateAllocation(
+    const RefPtr<AllocationHandle>& aHandle,
+    const NormalizedConstraints* aConstraintsUpdate,
+    const MediaEnginePrefs& aPrefs,
+    const nsString& aDeviceId,
+    const char** aOutBadConstraint)
+{
+  AssertIsOnOwningThread();
+
+  // aHandle and/or aConstraintsUpdate may be nullptr (see below)
+
+  AutoTArray<const NormalizedConstraints*, 10> allConstraints;
+  for (const Allocation& registered : mAllocations) {
+    if (aConstraintsUpdate && registered.mHandle == aHandle) {
+      continue; // Don't count old constraints
+    }
+    allConstraints.AppendElement(&registered.mHandle->mConstraints);
+  }
+  if (aConstraintsUpdate) {
+    allConstraints.AppendElement(aConstraintsUpdate);
+  } else if (aHandle) {
+    // In the case of Allocate(), the handle isn't registered yet.
+    allConstraints.AppendElement(&aHandle->mConstraints);
+  }
+
+  NormalizedConstraints netConstraints(allConstraints);
+  if (netConstraints.mBadConstraint) {
+    *aOutBadConstraint = netConstraints.mBadConstraint;
+    return NS_ERROR_FAILURE;
+  }
+
+  nsresult rv = UpdateSingleSource(aHandle,
+                                   netConstraints,
+                                   aPrefs,
+                                   aDeviceId,
+                                   aOutBadConstraint);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+  if (aHandle && aConstraintsUpdate) {
+    aHandle->mConstraints = *aConstraintsUpdate;
+  }
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCMicrophoneSource::Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                                               const dom::MediaTrackConstraints& aConstraints,
+                                               const MediaEnginePrefs& aPrefs,
+                                               const nsString& aDeviceId,
+                                               const char** aOutBadConstraint)
 {
   AssertIsOnOwningThread();
   MOZ_ASSERT(aHandle);
   NormalizedConstraints constraints(aConstraints);
   return ReevaluateAllocation(aHandle, &constraints, aPrefs, aDeviceId,
                               aOutBadConstraint);
 }
 
@@ -145,16 +245,18 @@ do {                                    
   if (rv != AudioProcessing::kNoError) {                    \
     MOZ_ASSERT_UNREACHABLE("APM error in " #fn);            \
     return;                                                 \
   }                                                         \
 } while(0);
 
 void MediaEngineWebRTCMicrophoneSource::UpdateAECSettingsIfNeeded(bool aEnable, EcModes aMode)
 {
+  AssertIsOnOwningThread();
+
   using webrtc::EcModes;
 
   EchoCancellation::SuppressionLevel level;
 
   switch(aMode) {
     case EcModes::kEcUnchanged:
       level = mAudioProcessing->echo_cancellation()->suppression_level();
       break;
@@ -188,16 +290,18 @@ void MediaEngineWebRTCMicrophoneSource::
     HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(aEnable));
     HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->set_suppression_level(level));
   }
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::UpdateAGCSettingsIfNeeded(bool aEnable, AgcModes aMode)
 {
+  AssertIsOnOwningThread();
+
 #if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
   if (aMode == kAgcAdaptiveAnalog) {
     MOZ_LOG(GetMediaManagerLog(),
             LogLevel::Error,
             ("Invalid AGC mode kAgcAdaptiveAnalog on mobile"));
     MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
                            " for the auto gain, on mobile.");
     aMode = kAgcDefault;
@@ -231,16 +335,18 @@ MediaEngineWebRTCMicrophoneSource::Updat
 
   HANDLE_APM_ERROR(mAudioProcessing->gain_control()->set_mode(mode));
   HANDLE_APM_ERROR(mAudioProcessing->gain_control()->Enable(aEnable));
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::UpdateNSSettingsIfNeeded(bool aEnable, NsModes aMode)
 {
+  AssertIsOnOwningThread();
+
   NoiseSuppression::Level nsLevel;
 
   switch (aMode) {
     case NsModes::kNsDefault:
       nsLevel = kDefaultNsMode;
       break;
     case NsModes::kNsUnchanged:
       nsLevel = mAudioProcessing->noise_suppression()->level();
@@ -269,23 +375,24 @@ MediaEngineWebRTCMicrophoneSource::Updat
   HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->set_level(nsLevel));
   HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->Enable(aEnable));
 }
 
 #undef HANDLE_APM_ERROR
 
 nsresult
 MediaEngineWebRTCMicrophoneSource::UpdateSingleSource(
-    const AllocationHandle* aHandle,
+    const RefPtr<const AllocationHandle>& aHandle,
     const NormalizedConstraints& aNetConstraints,
-    const NormalizedConstraints& aNewConstraint, /* Ignored */
     const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     const char** aOutBadConstraint)
 {
+  AssertIsOnOwningThread();
+
   FlattenedConstraints c(aNetConstraints);
 
   MediaEnginePrefs prefs = aPrefs;
   prefs.mAecOn = c.mEchoCancellation.Get(prefs.mAecOn);
   prefs.mAgcOn = c.mAutoGainControl.Get(prefs.mAgcOn);
   prefs.mNoiseOn = c.mNoiseSuppression.Get(prefs.mNoiseOn);
   uint32_t maxChannels = 1;
   if (mAudioInput->GetMaxAvailableChannels(maxChannels) != 0) {
@@ -341,35 +448,42 @@ MediaEngineWebRTCMicrophoneSource::Updat
       break;
 
     case kStarted:
       if (prefs == mLastPrefs) {
         return NS_OK;
       }
 
       if (prefs.mChannels != mLastPrefs.mChannels) {
-        MOZ_ASSERT(mSources.Length() > 0);
         // If the channel count changed, tell the MSG to open a new driver with
         // the correct channel count.
-        auto& source = mSources.LastElement();
+        MOZ_ASSERT(!mAllocations.IsEmpty());
+        RefPtr<SourceMediaStream> stream;
+        for (const Allocation& allocation : mAllocations) {
+          if (allocation.mStream) {
+            stream = allocation.mStream;
+            break;
+          }
+        }
+        MOZ_ASSERT(stream);
+
         mAudioInput->SetUserChannelCount(prefs.mChannels);
         // Get validated number of channel
         uint32_t channelCount = 0;
         mAudioInput->GetChannelCount(channelCount);
         MOZ_ASSERT(channelCount > 0 && mLastPrefs.mChannels > 0);
         if (mLastPrefs.mChannels != prefs.mChannels &&
-            !source->OpenNewAudioCallbackDriver(mListener)) {
+            !stream->OpenNewAudioCallbackDriver(mListener)) {
           MOZ_LOG(GetMediaManagerLog(), LogLevel::Error, ("Could not open a new AudioCallbackDriver for input"));
           return NS_ERROR_FAILURE;
         }
       }
 
       if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
-        MonitorAutoLock lock(mMonitor);
-        if (mSources.IsEmpty()) {
+        if (mAllocations.IsEmpty()) {
           LOG(("Audio device %d reallocated", mCapIndex));
         } else {
           LOG(("Audio device %d allocated shared", mCapIndex));
         }
       }
       break;
 
     default:
@@ -391,21 +505,29 @@ MediaEngineWebRTCMicrophoneSource::Updat
   return NS_OK;
 }
 
 #undef HANDLE_APM_ERROR
 
 void
 MediaEngineWebRTCMicrophoneSource::SetLastPrefs(const MediaEnginePrefs& aPrefs)
 {
+  AssertIsOnOwningThread();
+
   mLastPrefs = aPrefs;
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
-
-  NS_DispatchToMainThread(media::NewRunnableFrom([that, aPrefs]() mutable {
+  RefPtr<MediaStreamGraphImpl> graph;
+  for (const Allocation& allocation : mAllocations) {
+    if (allocation.mStream) {
+      graph = allocation.mStream->GraphImpl();
+      break;
+    }
+  }
+  NS_DispatchToMainThread(media::NewRunnableFrom([that, graph, aPrefs]() mutable {
     that->mSettings->mEchoCancellation.Value() = aPrefs.mAecOn;
     that->mSettings->mAutoGainControl.Value() = aPrefs.mAgcOn;
     that->mSettings->mNoiseSuppression.Value() = aPrefs.mNoiseOn;
     that->mSettings->mChannelCount.Value() = aPrefs.mChannels;
 
     class Message : public ControlMessage {
     public:
       Message(MediaEngineWebRTCMicrophoneSource* aSource,
@@ -421,156 +543,201 @@ MediaEngineWebRTCMicrophoneSource::SetLa
       }
 
     protected:
       RefPtr<MediaEngineWebRTCMicrophoneSource> mMicrophoneSource;
       bool mPassThrough;
     };
 
     bool passThrough = !(aPrefs.mAecOn || aPrefs.mAgcOn || aPrefs.mNoiseOn);
-    if (!that->mSources.IsEmpty()) {
-      that->mSources[0]->GraphImpl()->AppendMessage(MakeUnique<Message>(that, passThrough));
+    if (graph) {
+      graph->AppendMessage(MakeUnique<Message>(that, passThrough));
     }
 
     return NS_OK;
   }));
 }
 
 nsresult
-MediaEngineWebRTCMicrophoneSource::Deallocate(AllocationHandle* aHandle)
+MediaEngineWebRTCMicrophoneSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
+                                            const MediaEnginePrefs& aPrefs,
+                                            const nsString& aDeviceId,
+                                            const ipc::PrincipalInfo& aPrincipalInfo,
+                                            AllocationHandle** aOutHandle,
+                                            const char** aOutBadConstraint)
+{
+  AssertIsOnOwningThread();
+  MOZ_ASSERT(aOutHandle);
+  auto handle = MakeRefPtr<AllocationHandle>(aConstraints, aPrincipalInfo,
+                                             aPrefs, aDeviceId);
+  nsresult rv = ReevaluateAllocation(handle, nullptr, aPrefs, aDeviceId,
+                                     aOutBadConstraint);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+  // mAllocations is read under mMutex on the audio thread; append under the lock too.
+  { MutexAutoLock lock(mMutex); mAllocations.AppendElement(Allocation(handle)); }
+  handle.forget(aOutHandle);
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCMicrophoneSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
 {
   AssertIsOnOwningThread();
 
-  Super::Deallocate(aHandle);
+  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
+  MOZ_ASSERT(i != mAllocations.NoIndex);
+  {
+    MutexAutoLock lock(mMutex);
+    mAllocations.RemoveElementAt(i);
+  }
 
-  if (!mRegisteredHandles.Length()) {
+  if (mAllocations.IsEmpty()) {
     // If empty, no callbacks to deliver data should be occuring
-    if (mState != kStopped && mState != kAllocated) {
-      return NS_ERROR_FAILURE;
-    }
-
+    MOZ_ASSERT(mState != kReleased, "Source not allocated");
+    MOZ_ASSERT(mState != kStarted, "Source not stopped");
     FreeChannel();
     LOG(("Audio device %d deallocated", mCapIndex));
   } else {
     LOG(("Audio device %d deallocated but still in use", mCapIndex));
   }
   return NS_OK;
 }
 
 nsresult
 MediaEngineWebRTCMicrophoneSource::Start(SourceMediaStream *aStream,
-                                         TrackID aID,
-                                         const PrincipalHandle& aPrincipalHandle)
+                                         TrackID aTrackID,
+                                         const PrincipalHandle& aPrincipal)
 {
   AssertIsOnOwningThread();
-  if (sChannelsOpen == 0 || !aStream) {
-    return NS_ERROR_FAILURE;
-  }
+  MOZ_ASSERT(aStream);
+  MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
 
   // Until we fix bug 1400488 we need to block a second tab (OuterWindow)
   // from opening an already-open device.  If it's the same tab, they
   // will share a Graph(), and we can allow it.
-  if (!mSources.IsEmpty() && aStream->Graph() != mSources[0]->Graph()) {
+  if (!mAllocations.IsEmpty() && mAllocations[0].mStream &&
+      aStream->Graph() != mAllocations[0].mStream->Graph()) {
     return NS_ERROR_NOT_AVAILABLE;
   }
 
+  Allocation* allocation = nullptr;
+  for (Allocation& a : mAllocations) {
+    if (!a.mStream) {
+      // This assumes Allocate() is always followed by Start() before another
+      // Allocate(). But this is changing in one of the coming patches anyway.
+      allocation = &a;
+      break;
+    }
+  }
+  MOZ_ASSERT(allocation);
+  // TODO: once Start() receives the AllocationHandle, find the allocation with
+  // mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator()) instead.
   {
-    MonitorAutoLock lock(mMonitor);
-    mSources.AppendElement(aStream);
-    mPrincipalHandles.AppendElement(aPrincipalHandle);
-    MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+    MutexAutoLock lock(mMutex);
+    allocation->mStream = aStream;
+    allocation->mTrackID = aTrackID;
+    allocation->mPrincipal = aPrincipal;
   }
 
   AudioSegment* segment = new AudioSegment();
 
-  aStream->AddAudioTrack(aID, aStream->GraphRate(), 0, segment, SourceMediaStream::ADDTRACK_QUEUED);
+  aStream->AddAudioTrack(aTrackID,
+                         aStream->GraphRate(),
+                         0,
+                         segment,
+                         SourceMediaStream::ADDTRACK_QUEUED);
 
   // XXX Make this based on the pref.
   aStream->RegisterForAudioMixing();
-  LOG(("Start audio for stream %p", aStream));
 
   if (!mListener) {
-    mListener = new mozilla::WebRTCAudioDataListener(this);
+    mListener = new WebRTCAudioDataListener(this);
   }
-  if (mState == kStarted) {
-    MOZ_ASSERT(aID == mTrackID);
-    // Make sure we're associated with this stream
-    mAudioInput->StartRecording(aStream, mListener);
-    return NS_OK;
-  }
-  mState = kStarted;
-  mTrackID = aID;
 
   // Make sure logger starts before capture
   AsyncLatencyLogger::Get(true);
 
-  mAudioInput->StartRecording(aStream, mListener);
+  // Must be *before* StartSend() so it will notice we selected external input (full_duplex)
+  mAudioInput->StartRecording(allocation->mStream, mListener);
+
+  if (mState == kStarted) {
+    return NS_OK;
+  }
+  MOZ_ASSERT(mState == kAllocated || mState == kStopped);
+
+  {
+    MutexAutoLock lock(mMutex);
+    mState = kStarted;
+  }
 
   return NS_OK;
 }
 
 nsresult
-MediaEngineWebRTCMicrophoneSource::Stop(SourceMediaStream *aSource, TrackID aID)
+MediaEngineWebRTCMicrophoneSource::Stop(SourceMediaStream *aStream, TrackID aTrackID)
 {
   AssertIsOnOwningThread();
-  {
-    MonitorAutoLock lock(mMonitor);
+
+  aStream->EndTrack(aTrackID);
 
-    size_t sourceIndex = mSources.IndexOf(aSource);
-    if (sourceIndex == mSources.NoIndex) {
-      // Already stopped - this is allowed
-      return NS_OK;
+  class StreamComparator {
+  public:
+    bool Equals(const Allocation& aItem,
+                const RefPtr<SourceMediaStream>& aStream) const
+    {
+      return aItem.mStream == aStream;
     }
-    mSources.RemoveElementAt(sourceIndex);
-    mPrincipalHandles.RemoveElementAt(sourceIndex);
-    MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
+  };
 
-    aSource->EndTrack(aID);
+  MutexAutoLock lock(mMutex);
+  bool removed = mAllocations.RemoveElement(aStream, StreamComparator());
+  MOZ_ASSERT(removed); (void)removed; // removal must not live inside MOZ_ASSERT; it's a no-op in release
+  mAudioInput->StopRecording(aStream);
 
-    if (!mSources.IsEmpty()) {
-      mAudioInput->StopRecording(aSource);
-      return NS_OK;
-    }
-    if (mState != kStarted) {
-      return NS_ERROR_FAILURE;
-    }
+  if (!mAllocations.IsEmpty()) {
+    // Another track is keeping us from stopping
+    return NS_OK;
+  }
 
+  MOZ_ASSERT(mState == kStarted, "Should be started when stopping");
+  {
+    // mMutex is already held for the remainder of Stop() (acquired above); re-locking here would deadlock.
     mState = kStopped;
   }
+
   if (mListener) {
     // breaks a cycle, since the WebRTCAudioDataListener has a RefPtr to us
     mListener->Shutdown();
     mListener = nullptr;
   }
 
-  mAudioInput->StopRecording(aSource);
-
   return NS_OK;
 }
 
 void
-MediaEngineWebRTCMicrophoneSource::NotifyPull(MediaStreamGraph *aGraph,
-                                              SourceMediaStream *aSource,
-                                              TrackID aID,
-                                              StreamTime aDesiredTime,
-                                              const PrincipalHandle& aPrincipalHandle)
+MediaEngineWebRTCMicrophoneSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                        const RefPtr<SourceMediaStream>& aStream,
+                                        TrackID aTrackID,
+                                        StreamTime aDesiredTime,
+                                        const PrincipalHandle& aPrincipalHandle)
 {
   LOG_FRAMES(("NotifyPull, desired = %" PRId64, (int64_t) aDesiredTime));
 
-  StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
+  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
   if (delta <= 0) {
     return;
   }
 
   // Not enough data has been pushed so we fill it with silence.
   // This could be due to underruns or because we have been stopped.
 
   AudioSegment audio;
   audio.AppendNullData(delta);
-  aSource->AppendToTrack(aID, &audio);
+  aStream->AppendToTrack(aTrackID, &audio);
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::NotifyOutputData(MediaStreamGraph* aGraph,
                                                     AudioDataValue* aBuffer,
                                                     size_t aFrames,
                                                     TrackRate aRate,
                                                     uint32_t aChannels)
@@ -722,17 +889,16 @@ MediaEngineWebRTCMicrophoneSource::Packe
     StreamConfig outputConfig = inputConfig;
 
     // Bug 1404965: Get the right delay here, it saves some work down the line.
     mAudioProcessing->set_stream_delay_ms(0);
 
     // Bug 1414837: find a way to not allocate here.
     RefPtr<SharedBuffer> buffer =
       SharedBuffer::Create(mPacketizerInput->PacketSize() * aChannels * sizeof(float));
-    AudioSegment segment;
 
     // Prepare channel pointers to the SharedBuffer created above.
     AutoTArray<float*, 8> processedOutputChannelPointers;
     AutoTArray<const float*, 8> processedOutputChannelPointersConst;
     processedOutputChannelPointers.SetLength(aChannels);
     processedOutputChannelPointersConst.SetLength(aChannels);
 
     offset = 0;
@@ -741,75 +907,90 @@ MediaEngineWebRTCMicrophoneSource::Packe
       processedOutputChannelPointersConst[i] = static_cast<float*>(buffer->Data()) + offset;
       offset += mPacketizerInput->PacketSize();
     }
 
     mAudioProcessing->ProcessStream(deinterleavedPacketizedInputDataChannelPointers.Elements(),
                                     inputConfig,
                                     outputConfig,
                                     processedOutputChannelPointers.Elements());
-    MonitorAutoLock lock(mMonitor);
-    if (mState != kStarted)
+    MutexAutoLock lock(mMutex);
+    if (mState != kStarted) {
       return;
+    }
 
-    for (size_t i = 0; i < mSources.Length(); ++i) {
-      if (!mSources[i]) { // why ?!
+    AudioSegment segment;
+    for (const Allocation& allocation : mAllocations) {
+      if (!allocation.mStream) {
         continue;
       }
 
       // We already have planar audio data of the right format. Insert into the
       // MSG.
       MOZ_ASSERT(processedOutputChannelPointers.Length() == aChannels);
       RefPtr<SharedBuffer> other = buffer;
       segment.AppendFrames(other.forget(),
                            processedOutputChannelPointersConst,
                            mPacketizerInput->PacketSize(),
-                           mPrincipalHandles[i]);
-      mSources[i]->AppendToTrack(mTrackID, &segment);
+                           allocation.mPrincipal);
+      allocation.mStream->AppendToTrack(allocation.mTrackID, &segment);
     }
   }
 }
 
+bool
+MediaEngineWebRTCMicrophoneSource::PassThrough() const
+{
+  return mSkipProcessing;
+}
+
+void
+MediaEngineWebRTCMicrophoneSource::SetPassThrough(bool aPassThrough)
+{
+  mSkipProcessing = aPassThrough;
+}
+
 template<typename T>
 void
 MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
                                                  size_t aFrames,
                                                  uint32_t aChannels)
 {
-  MonitorAutoLock lock(mMonitor);
+  MutexAutoLock lock(mMutex);
+
   if (mState != kStarted) {
     return;
   }
 
   if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
     mTotalFrames += aFrames;
-    if (mTotalFrames > mLastLogFrames + mSources[0]->GraphRate()) { // ~ 1 second
+    if (!mAllocations.IsEmpty() && mAllocations[0].mStream &&
+        mTotalFrames > mLastLogFrames +
+                       mAllocations[0].mStream->GraphRate()) { // ~ 1 second
       MOZ_LOG(AudioLogModule(), LogLevel::Debug,
               ("%p: Inserting %zu samples into graph, total frames = %" PRIu64,
                (void*)this, aFrames, mTotalFrames));
       mLastLogFrames = mTotalFrames;
     }
   }
 
-  size_t len = mSources.Length();
-  for (size_t i = 0; i < len; ++i) {
-    if (!mSources[i]) {
+  for (Allocation& allocation : mAllocations) {
+    if (!allocation.mStream) {
       continue;
     }
 
     TimeStamp insertTime;
     // Make sure we include the stream and the track.
     // The 0:1 is a flag to note when we've done the final insert for a given input block.
     LogTime(AsyncLatencyLogger::AudioTrackInsertion,
-            LATENCY_STREAM_ID(mSources[i].get(), mTrackID),
-            (i+1 < len) ? 0 : 1, insertTime);
+            LATENCY_STREAM_ID(allocation.mStream.get(), allocation.mTrackID),
+            (&allocation != &mAllocations.LastElement()) ? 0 : 1, insertTime);
 
     // Bug 971528 - Support stereo capture in gUM
-    MOZ_ASSERT(aChannels >= 1 && aChannels <= 8,
-               "Support up to 8 channels");
+    MOZ_ASSERT(aChannels >= 1 && aChannels <= 8, "Support up to 8 channels");
 
     AudioSegment segment;
     RefPtr<SharedBuffer> buffer =
       SharedBuffer::Create(aFrames * aChannels * sizeof(T));
     AutoTArray<const T*, 8> channels;
     if (aChannels == 1) {
       PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames);
       channels.AppendElement(static_cast<T*>(buffer->Data()));
@@ -828,20 +1009,20 @@ MediaEngineWebRTCMicrophoneSource::Inser
       DeinterleaveAndConvertBuffer(aBuffer,
                                    aFrames,
                                    aChannels,
                                    write_channels.Elements());
     }
 
     MOZ_ASSERT(aChannels == channels.Length());
     segment.AppendFrames(buffer.forget(), channels, aFrames,
-                         mPrincipalHandles[i]);
+                          allocation.mPrincipal);
     segment.GetStartTime(insertTime);
 
-    mSources[i]->AppendToTrack(mTrackID, &segment);
+    allocation.mStream->AppendToTrack(allocation.mTrackID, &segment);
   }
 }
 
 // Called back on GraphDriver thread!
 // Note this can be called back after ::Shutdown()
 void
 MediaEngineWebRTCMicrophoneSource::NotifyInputData(MediaStreamGraph* aGraph,
                                                    const AudioDataValue* aBuffer,
@@ -875,17 +1056,18 @@ do {                                    
       #_processing " on device change.");                           \
       return;                                                       \
     }                                                               \
                                                                     \
   }                                                                 \
 }  while(0)
 
 void
-MediaEngineWebRTCMicrophoneSource::DeviceChanged() {
+MediaEngineWebRTCMicrophoneSource::DeviceChanged()
+{
   // Reset some processing
   ResetProcessingIfNeeded(gain_control);
   ResetProcessingIfNeeded(echo_cancellation);
   ResetProcessingIfNeeded(noise_suppression);
 }
 
 // mState records if a channel is allocated (slightly redundantly to mChannel)
 void
@@ -905,100 +1087,88 @@ MediaEngineWebRTCMicrophoneSource::Alloc
   mState = kAllocated;
   sChannelsOpen++;
   return true;
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::Shutdown()
 {
-  Super::Shutdown();
+  AssertIsOnOwningThread();
+
   if (mListener) {
     // breaks a cycle, since the WebRTCAudioDataListener has a RefPtr to us
     mListener->Shutdown();
     // Don't release the webrtc.org pointers yet until the Listener is (async) shutdown
     mListener = nullptr;
   }
 
   if (mState == kStarted) {
-    SourceMediaStream *source;
-    bool empty;
-
-    while (1) {
-      {
-        MonitorAutoLock lock(mMonitor);
-        empty = mSources.IsEmpty();
-        if (empty) {
-          break;
-        }
-        source = mSources[0];
-      }
-      Stop(source, kAudioTrack); // XXX change to support multiple tracks
+    for (size_t i = mAllocations.Length(); i > 0; --i) { // backwards; Stop() removes the element
+      Stop(mAllocations[i - 1].mStream, mAllocations[i - 1].mTrackID);
     }
     MOZ_ASSERT(mState == kStopped);
   }
 
-  while (mRegisteredHandles.Length()) {
+  while (!mAllocations.IsEmpty()) {
     MOZ_ASSERT(mState == kAllocated || mState == kStopped);
     // on last Deallocate(), FreeChannel()s and DeInit()s if all channels are released
-    Deallocate(mRegisteredHandles[0].get());
+    Deallocate(mAllocations[0].mHandle);
   }
   MOZ_ASSERT(mState == kReleased);
 }
 
-void
-MediaEngineWebRTCAudioCaptureSource::GetName(nsAString &aName) const
+nsString
+MediaEngineWebRTCAudioCaptureSource::GetName() const
 {
-  aName.AssignLiteral("AudioCapture");
+  return NS_LITERAL_STRING(u"AudioCapture");
 }
 
-void
-MediaEngineWebRTCAudioCaptureSource::GetUUID(nsACString &aUUID) const
+nsCString
+MediaEngineWebRTCAudioCaptureSource::GetUUID() const
 {
   nsID uuid;
   char uuidBuffer[NSID_LENGTH];
   nsCString asciiString;
   ErrorResult rv;
 
   rv = nsContentUtils::GenerateUUIDInPlace(uuid);
   if (rv.Failed()) {
-    aUUID.AssignLiteral("");
-    return;
+    return NS_LITERAL_CSTRING("");
   }
 
-
   uuid.ToProvidedString(uuidBuffer);
   asciiString.AssignASCII(uuidBuffer);
 
   // Remove {} and the null terminator
-  aUUID.Assign(Substring(asciiString, 1, NSID_LENGTH - 3));
+  return nsCString(Substring(asciiString, 1, NSID_LENGTH - 3));
 }
 
 nsresult
-MediaEngineWebRTCAudioCaptureSource::Start(SourceMediaStream *aMediaStream,
-                                           TrackID aId,
-                                           const PrincipalHandle& aPrincipalHandle)
+MediaEngineWebRTCAudioCaptureSource::Start(SourceMediaStream *aStream,
+                                           TrackID aTrackID,
+                                           const PrincipalHandle& aPrincipal)
 {
   AssertIsOnOwningThread();
-  aMediaStream->AddTrack(aId, 0, new AudioSegment());
+  aStream->AddTrack(aTrackID, 0, new AudioSegment());
   return NS_OK;
 }
 
 nsresult
-MediaEngineWebRTCAudioCaptureSource::Stop(SourceMediaStream *aMediaStream,
-                                          TrackID aId)
+MediaEngineWebRTCAudioCaptureSource::Stop(SourceMediaStream *aStream,
+                                          TrackID aTrackID)
 {
   AssertIsOnOwningThread();
-  aMediaStream->EndAllTrackAndFinish();
+  aStream->EndAllTrackAndFinish();
   return NS_OK;
 }
 
 nsresult
-MediaEngineWebRTCAudioCaptureSource::Restart(
-    AllocationHandle* aHandle,
+MediaEngineWebRTCAudioCaptureSource::Reconfigure(
+    const RefPtr<AllocationHandle>& aHandle,
     const dom::MediaTrackConstraints& aConstraints,
     const MediaEnginePrefs &aPrefs,
     const nsString& aDeviceId,
     const char** aOutBadConstraint)
 {
   MOZ_ASSERT(!aHandle);
   return NS_OK;
 }
--- a/dom/media/webrtc/MediaTrackConstraints.cpp
+++ b/dom/media/webrtc/MediaTrackConstraints.cpp
@@ -1,21 +1,24 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaTrackConstraints.h"
-#include "nsIScriptError.h"
-#include "mozilla/dom/MediaStreamTrackBinding.h"
 
 #include <limits>
 #include <algorithm>
 #include <iterator>
 
+#include "MediaEngineSource.h"
+#include "MediaManager.h"
+#include "nsIScriptError.h"
+#include "mozilla/dom/MediaStreamTrackBinding.h"
+
 namespace mozilla {
 
 using dom::ConstrainBooleanParameters;
 using dom::OwningLongOrConstrainLongRange;
 
 template<class ValueType>
 template<class ConstrainRange>
 void
@@ -389,17 +392,33 @@ FlattenedConstraints::FlattenedConstrain
 // The full algorithm for all devices. Sources that don't list capabilities
 // need to fake it and hardcode some by populating mHardcodedCapabilities above.
 //
 // Fitness distance returned as integer math * 1000. Infinity = UINT32_MAX
 
 // First, all devices have a minimum distance based on their deviceId.
 // If you have no other constraints, use this one. Reused by all device types.
 
-uint32_t
+/* static */ bool
+MediaConstraintsHelper::SomeSettingsFit(const NormalizedConstraints &aConstraints,
+                                        const nsTArray<RefPtr<MediaDevice>>& aDevices)
+{
+  nsTArray<const NormalizedConstraintSet*> sets;
+  sets.AppendElement(&aConstraints);
+
+  MOZ_ASSERT(!aDevices.IsEmpty());
+  for (auto& device : aDevices) {
+    if (device->GetBestFitnessDistance(sets, false) != UINT32_MAX) {
+      return true;
+    }
+  }
+  return false;
+}
+
+/* static */ uint32_t
 MediaConstraintsHelper::GetMinimumFitnessDistance(
     const NormalizedConstraintSet &aConstraints,
     const nsString& aDeviceId)
 {
   return FitnessDistance(aDeviceId, aConstraints.mDeviceId);
 }
 
 template<class ValueType, class NormalizedRange>
@@ -450,53 +469,142 @@ MediaConstraintsHelper::FitnessDistance(
     return UINT32_MAX;
   }
   if (!aParams.mIdeal.empty() && aParams.mIdeal.find(aN) == aParams.mIdeal.end()) {
     return 1000;
   }
   return 0;
 }
 
-template<class MediaEngineSourceType>
-const char*
+/* static */ const char*
+MediaConstraintsHelper::SelectSettings(
+    const NormalizedConstraints& aConstraints,
+    nsTArray<RefPtr<MediaDevice>>& aDevices,
+    bool aIsChrome)
+{
+  auto& c = aConstraints;
+
+  // First apply top-level constraints.
+
+  // Stack constraintSets that pass, starting with the required one, because the
+  // whole stack must be re-satisfied each time a capability-set is ruled out
+  // (this avoids storing state or pushing algorithm into the lower-level code).
+  nsTArray<RefPtr<MediaDevice>> unsatisfactory;
+  nsTArray<const NormalizedConstraintSet*> aggregateConstraints;
+  aggregateConstraints.AppendElement(&c);
+
+  std::multimap<uint32_t, RefPtr<MediaDevice>> ordered;
+
+  for (uint32_t i = 0; i < aDevices.Length();) {
+    uint32_t distance =
+      aDevices[i]->GetBestFitnessDistance(aggregateConstraints, aIsChrome);
+    if (distance == UINT32_MAX) {
+      unsatisfactory.AppendElement(Move(aDevices[i]));
+      aDevices.RemoveElementAt(i);
+    } else {
+      ordered.insert(std::make_pair(distance, aDevices[i]));
+      ++i;
+    }
+  }
+  if (aDevices.IsEmpty()) {
+    return FindBadConstraint(c, unsatisfactory);
+  }
+
+  // Order devices by shortest distance
+  for (auto& ordinal : ordered) {
+    aDevices.RemoveElement(ordinal.second);
+    aDevices.AppendElement(ordinal.second);
+  }
+
+  // Then apply advanced constraints.
+
+  for (int i = 0; i < int(c.mAdvanced.size()); i++) {
+    aggregateConstraints.AppendElement(&c.mAdvanced[i]);
+    nsTArray<RefPtr<MediaDevice>> rejects;
+    for (uint32_t j = 0; j < aDevices.Length();) {
+      uint32_t distance = aDevices[j]->GetBestFitnessDistance(aggregateConstraints,
+                                                              aIsChrome);
+      if (distance == UINT32_MAX) {
+        rejects.AppendElement(Move(aDevices[j]));
+        aDevices.RemoveElementAt(j);
+      } else {
+        ++j;
+      }
+    }
+    if (aDevices.IsEmpty()) {
+      aDevices.AppendElements(Move(rejects));
+      aggregateConstraints.RemoveElementAt(aggregateConstraints.Length() - 1);
+    }
+  }
+  return nullptr;
+}
+
+/* static */ const char*
 MediaConstraintsHelper::FindBadConstraint(
     const NormalizedConstraints& aConstraints,
-    const MediaEngineSourceType& aMediaEngineSource,
+    const nsTArray<RefPtr<MediaDevice>>& aDevices)
+{
+  // The spec says to report a constraint that satisfies NONE
+  // of the sources. Unfortunately, this is a bit laborious to find out, and
+  // requires updating as new constraints are added!
+  auto& c = aConstraints;
+  dom::MediaTrackConstraints empty;
+
+  if (aDevices.IsEmpty() ||
+      !SomeSettingsFit(NormalizedConstraints(empty), aDevices)) {
+    return "";
+  }
+  {
+    NormalizedConstraints fresh(empty);
+    fresh.mDeviceId = c.mDeviceId;
+    if (!SomeSettingsFit(fresh, aDevices)) {
+      return "deviceId";
+    }
+  }
+  {
+    NormalizedConstraints fresh(empty);
+    fresh.mWidth = c.mWidth;
+    if (!SomeSettingsFit(fresh, aDevices)) {
+      return "width";
+    }
+  }
+  {
+    NormalizedConstraints fresh(empty);
+    fresh.mHeight = c.mHeight;
+    if (!SomeSettingsFit(fresh, aDevices)) {
+      return "height";
+    }
+  }
+  {
+    NormalizedConstraints fresh(empty);
+    fresh.mFrameRate = c.mFrameRate;
+    if (!SomeSettingsFit(fresh, aDevices)) {
+      return "frameRate";
+    }
+  }
+  {
+    NormalizedConstraints fresh(empty);
+    fresh.mFacingMode = c.mFacingMode;
+    if (!SomeSettingsFit(fresh, aDevices)) {
+      return "facingMode";
+    }
+  }
+  return "";
+}
+
+/* static */ const char*
+MediaConstraintsHelper::FindBadConstraint(
+    const NormalizedConstraints& aConstraints,
+    const RefPtr<MediaEngineSource>& aMediaEngineSource,
     const nsString& aDeviceId)
 {
-  class MockDevice
-  {
-  public:
-    NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MockDevice);
-
-    explicit MockDevice(const MediaEngineSourceType* aMediaEngineSource,
-                        const nsString& aDeviceId)
-    : mMediaEngineSource(aMediaEngineSource),
-      // The following dud code exists to avoid 'unused typedef' error on linux.
-      mDeviceId(MockDevice::HasThreadSafeRefCnt::value ? aDeviceId : nsString()) {}
-
-    uint32_t GetBestFitnessDistance(
-        const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-        bool aIsChrome)
-    {
-      return mMediaEngineSource->GetBestFitnessDistance(aConstraintSets,
-                                                        mDeviceId);
-    }
-
-  private:
-    ~MockDevice() {}
-
-    const MediaEngineSourceType* mMediaEngineSource;
-    nsString mDeviceId;
-  };
-
-  Unused << typename MockDevice::HasThreadSafeRefCnt();
-
-  nsTArray<RefPtr<MockDevice>> devices;
-  devices.AppendElement(new MockDevice(&aMediaEngineSource, aDeviceId));
+  AutoTArray<RefPtr<MediaDevice>, 1> devices;
+  devices.AppendElement(MakeRefPtr<MediaDevice>(aMediaEngineSource,
+                                                aMediaEngineSource->GetName(),
+                                                aDeviceId));
   return FindBadConstraint(aConstraints, devices);
 }
 
 void
 MediaConstraintsHelper::ConvertOldWithWarning(
     const dom::OwningBooleanOrConstrainBooleanParameters& old,
     dom::OwningBooleanOrConstrainBooleanParameters& to,
     const char* aMessageName,
--- a/dom/media/webrtc/MediaTrackConstraints.h
+++ b/dom/media/webrtc/MediaTrackConstraints.h
@@ -2,26 +2,29 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 // This file should not be included by other includes, as it contains code
 
 #ifndef MEDIATRACKCONSTRAINTS_H_
 #define MEDIATRACKCONSTRAINTS_H_
 
+#include <map>
+#include <set>
+#include <vector>
+
 #include "mozilla/Attributes.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "mozilla/dom/MediaTrackConstraintSetBinding.h"
 #include "mozilla/dom/MediaTrackSupportedConstraintsBinding.h"
 
-#include <map>
-#include <set>
-#include <vector>
+namespace mozilla {
 
-namespace mozilla {
+class MediaDevice;
+class MediaEngineSource;
 
 template<class EnumValuesStrings, class Enum>
 static const char* EnumToASCII(const EnumValuesStrings& aStrings, Enum aValue) {
   return aStrings[uint32_t(aValue)].value;
 }
 
 template<class EnumValuesStrings, class Enum>
 static Enum StringToEnum(const EnumValuesStrings& aStrings,
@@ -31,17 +34,16 @@ static Enum StringToEnum(const EnumValue
       return Enum(i);
     }
   }
   return aDefaultValue;
 }
 
 // Helper classes for orthogonal constraints without interdependencies.
 // Instead of constraining values, constrain the constraints themselves.
-
 class NormalizedConstraintSet
 {
 protected:
   class BaseRange
   {
   protected:
     typedef BaseRange NormalizedConstraintSet::* MemberPtrType;
 
@@ -292,176 +294,55 @@ struct NormalizedConstraints : public No
 struct FlattenedConstraints : public NormalizedConstraintSet
 {
   explicit FlattenedConstraints(const NormalizedConstraints& aOther);
 
   explicit FlattenedConstraints(const dom::MediaTrackConstraints& aOther)
     : FlattenedConstraints(NormalizedConstraints(aOther)) {}
 };
 
-// A helper class for MediaEngines
-
+// A helper class for MediaEngineSources
 class MediaConstraintsHelper
 {
-protected:
+public:
   template<class ValueType, class NormalizedRange>
   static uint32_t FitnessDistance(ValueType aN, const NormalizedRange& aRange);
   template<class ValueType, class NormalizedRange>
   static uint32_t FeasibilityDistance(ValueType aN, const NormalizedRange& aRange);
   static uint32_t FitnessDistance(nsString aN,
       const NormalizedConstraintSet::StringRange& aConstraint);
+protected:
+
+  static bool
+  SomeSettingsFit(const NormalizedConstraints &aConstraints,
+                  const nsTArray<RefPtr<MediaDevice>>& aDevices);
+
+public:
 
   static uint32_t
   GetMinimumFitnessDistance(const NormalizedConstraintSet &aConstraints,
                             const nsString& aDeviceId);
 
-  template<class DeviceType>
-  static bool
-  SomeSettingsFit(const NormalizedConstraints &aConstraints,
-                  nsTArray<RefPtr<DeviceType>>& aDevices)
-  {
-    nsTArray<const NormalizedConstraintSet*> sets;
-    sets.AppendElement(&aConstraints);
-
-    MOZ_ASSERT(aDevices.Length());
-    for (auto& device : aDevices) {
-      if (device->GetBestFitnessDistance(sets, false) != UINT32_MAX) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-public:
   // Apply constrains to a supplied list of devices (removes items from the list)
-
-  template<class DeviceType>
   static const char*
-  SelectSettings(const NormalizedConstraints &aConstraints,
-                 nsTArray<RefPtr<DeviceType>>& aDevices,
-                 bool aIsChrome)
-  {
-    auto& c = aConstraints;
-
-    // First apply top-level constraints.
-
-    // Stack constraintSets that pass, starting with the required one, because the
-    // whole stack must be re-satisfied each time a capability-set is ruled out
-    // (this avoids storing state or pushing algorithm into the lower-level code).
-    nsTArray<RefPtr<DeviceType>> unsatisfactory;
-    nsTArray<const NormalizedConstraintSet*> aggregateConstraints;
-    aggregateConstraints.AppendElement(&c);
-
-    std::multimap<uint32_t, RefPtr<DeviceType>> ordered;
+  SelectSettings(
+      const NormalizedConstraints& aConstraints,
+      nsTArray<RefPtr<MediaDevice>>& aDevices,
+      bool aIsChrome);
 
-    for (uint32_t i = 0; i < aDevices.Length();) {
-      uint32_t distance = aDevices[i]->GetBestFitnessDistance(aggregateConstraints,
-                                                              aIsChrome);
-      if (distance == UINT32_MAX) {
-        unsatisfactory.AppendElement(aDevices[i]);
-        aDevices.RemoveElementAt(i);
-      } else {
-        ordered.insert(std::pair<uint32_t, RefPtr<DeviceType>>(distance,
-                                                               aDevices[i]));
-        ++i;
-      }
-    }
-    if (!aDevices.Length()) {
-      return FindBadConstraint(c, unsatisfactory);
-    }
-
-    // Order devices by shortest distance
-    for (auto& ordinal : ordered) {
-      aDevices.RemoveElement(ordinal.second);
-      aDevices.AppendElement(ordinal.second);
-    }
-
-    // Then apply advanced constraints.
-
-    for (int i = 0; i < int(c.mAdvanced.size()); i++) {
-      aggregateConstraints.AppendElement(&c.mAdvanced[i]);
-      nsTArray<RefPtr<DeviceType>> rejects;
-      for (uint32_t j = 0; j < aDevices.Length();) {
-        if (aDevices[j]->GetBestFitnessDistance(aggregateConstraints,
-                                                aIsChrome) == UINT32_MAX) {
-          rejects.AppendElement(aDevices[j]);
-          aDevices.RemoveElementAt(j);
-        } else {
-          ++j;
-        }
-      }
-      if (!aDevices.Length()) {
-        aDevices.AppendElements(Move(rejects));
-        aggregateConstraints.RemoveElementAt(aggregateConstraints.Length() - 1);
-      }
-    }
-    return nullptr;
-  }
-
-  template<class DeviceType>
   static const char*
   FindBadConstraint(const NormalizedConstraints& aConstraints,
-                    nsTArray<RefPtr<DeviceType>>& aDevices)
-  {
-    // The spec says to report a constraint that satisfies NONE
-    // of the sources. Unfortunately, this is a bit laborious to find out, and
-    // requires updating as new constraints are added!
-    auto& c = aConstraints;
-    dom::MediaTrackConstraints empty;
+                    const nsTArray<RefPtr<MediaDevice>>& aDevices);
 
-    if (!aDevices.Length() ||
-        !SomeSettingsFit(NormalizedConstraints(empty), aDevices)) {
-      return "";
-    }
-    {
-      NormalizedConstraints fresh(empty);
-      fresh.mDeviceId = c.mDeviceId;
-      if (!SomeSettingsFit(fresh, aDevices)) {
-        return "deviceId";
-      }
-    }
-    {
-      NormalizedConstraints fresh(empty);
-      fresh.mWidth = c.mWidth;
-      if (!SomeSettingsFit(fresh, aDevices)) {
-        return "width";
-      }
-    }
-    {
-      NormalizedConstraints fresh(empty);
-      fresh.mHeight = c.mHeight;
-      if (!SomeSettingsFit(fresh, aDevices)) {
-        return "height";
-      }
-    }
-    {
-      NormalizedConstraints fresh(empty);
-      fresh.mFrameRate = c.mFrameRate;
-      if (!SomeSettingsFit(fresh, aDevices)) {
-        return "frameRate";
-      }
-    }
-    {
-      NormalizedConstraints fresh(empty);
-      fresh.mFacingMode = c.mFacingMode;
-      if (!SomeSettingsFit(fresh, aDevices)) {
-        return "facingMode";
-      }
-    }
-    return "";
-  }
-
-  template<class MediaEngineSourceType>
   static const char*
   FindBadConstraint(const NormalizedConstraints& aConstraints,
-                    const MediaEngineSourceType& aMediaEngineSource,
+                    const RefPtr<MediaEngineSource>& aMediaEngineSource,
                     const nsString& aDeviceId);
 
   // Warn on and convert use of deprecated constraints to new ones
-
   static void
   ConvertOldWithWarning(
       const dom::OwningBooleanOrConstrainBooleanParameters& old,
       dom::OwningBooleanOrConstrainBooleanParameters& to,
       const char* aMessageName,
       nsPIDOMWindowInner* aWindow);
 };
 
--- a/dom/media/webrtc/moz.build
+++ b/dom/media/webrtc/moz.build
@@ -9,29 +9,30 @@ with Files('*'):
     BUG_COMPONENT = ('Core', 'WebRTC: Audio/Video')
 
 with Files('PeerIdentity.*'):
     BUG_COMPONENT = ('Core', 'WebRTC: Signaling')
 
 XPIDL_MODULE = 'content_webrtc'
 
 EXPORTS += [
+    'AllocationHandle.h',
     'MediaEngine.h',
-    'MediaEngineCameraVideoSource.h',
     'MediaEngineDefault.h',
+    'MediaEnginePrefs.h',
+    'MediaEngineSource.h',
     'MediaTrackConstraints.h',
     'SineWaveGenerator.h',
 ]
 
 if CONFIG['MOZ_WEBRTC']:
     EXPORTS += ['MediaEngineRemoteVideoSource.h',
                 'MediaEngineWebRTC.h']
     EXPORTS.mozilla.dom += [ 'RTCIdentityProviderRegistrar.h' ]
     UNIFIED_SOURCES += [
-        'MediaEngineCameraVideoSource.cpp',
         'MediaEngineRemoteVideoSource.cpp',
         'MediaEngineTabVideoSource.cpp',
         'MediaEngineWebRTCAudio.cpp',
         'RTCCertificate.cpp',
         'RTCIdentityProviderRegistrar.cpp',
     ]
     # MediaEngineWebRTC.cpp needs to be built separately.
     SOURCES += [
@@ -48,16 +49,17 @@ if CONFIG['MOZ_WEBRTC']:
     ]
 
 XPIDL_SOURCES += [
     'nsITabSource.idl'
 ]
 
 UNIFIED_SOURCES += [
     'MediaEngineDefault.cpp',
+    'MediaEngineSource.cpp',
     'MediaTrackConstraints.cpp',
     'PeerIdentity.cpp',
 ]
 
 EXPORTS.mozilla += [
     'PeerIdentity.h',
 ]
 EXPORTS.mozilla.dom += [
--- a/dom/media/webspeech/recognition/SpeechRecognition.cpp
+++ b/dom/media/webspeech/recognition/SpeechRecognition.cpp
@@ -16,16 +16,17 @@
 #include "mozilla/dom/MediaStreamError.h"
 #include "mozilla/MediaManager.h"
 #include "mozilla/Preferences.h"
 #include "MediaPrefs.h"
 #include "mozilla/Services.h"
 
 #include "AudioSegment.h"
 #include "DOMMediaStream.h"
+#include "MediaEnginePrefs.h"
 #include "endpointer.h"
 
 #include "mozilla/dom/SpeechRecognitionEvent.h"
 #include "nsContentUtils.h"
 #include "nsIDocument.h"
 #include "nsIObserverService.h"
 #include "nsIPermissionManager.h"
 #include "nsIPrincipal.h"