--- a/dom/media/systemservices/CamerasParent.cpp
+++ b/dom/media/systemservices/CamerasParent.cpp
@@ -34,16 +34,41 @@
mozilla::LazyLogModule gCamerasParentLog("CamerasParent");
#define LOG(args) MOZ_LOG(gCamerasParentLog, mozilla::LogLevel::Debug, args)
#define LOG_VERBOSE(args) MOZ_LOG(gCamerasParentLog, mozilla::LogLevel::Verbose, args)
#define LOG_ENABLED() MOZ_LOG_TEST(gCamerasParentLog, mozilla::LogLevel::Debug)
namespace mozilla {
namespace camera {
+std::map<uint32_t, const char *> sDeviceUniqueIDs;
+std::map<uint32_t, webrtc::VideoCaptureCapability> sAllRequestedCapabilities;
+
+uint32_t
+ResolutionFeasibilityDistance(int32_t candidate, int32_t requested)
+{
+  // Find the smallest resolution larger than all requested capabilities,
+  // so down-scaling can fulfill each request. Smaller value = better.
+  if (std::max(candidate, requested) <= 0) {
+    // Both dimensions 0 (or negative): avoid dividing by zero below.
+    return 0;
+  }
+  if (candidate >= requested) {
+    return uint32_t(candidate - requested) * 1000 / uint32_t(std::max(candidate, requested));
+  }
+  // Penalize candidates smaller than requested (they would need up-scaling).
+  return (UINT32_MAX / 2) + uint32_t(requested - candidate) * 1000 / uint32_t(std::max(candidate, requested));
+}
+
+uint32_t
+FeasibilityDistance(int32_t candidate, int32_t requested)
+{ // Relative mismatch in permille; return 0 when max()==0 to avoid divide-by-zero.
+  return std::max(candidate, requested) > 0 ? std::abs(candidate - requested) * 1000 / std::max(candidate, requested) : 0;
+}
+
RefPtr<VideoEngine> CamerasParent::sEngines[CaptureEngine::MaxEngine];
int32_t CamerasParent::sNumOfOpenCamerasParentEngines = 0;
int32_t CamerasParent::sNumOfCamerasParents = 0;
base::Thread* CamerasParent::sVideoCaptureThread = nullptr;
Monitor* CamerasParent::sThreadMonitor = nullptr;
StaticMutex CamerasParent::sMutex;
// 3 threads are involved in this code:
@@ -551,16 +576,27 @@ CamerasParent::RecvGetCaptureCapability(
RefPtr<Runnable> webrtc_runnable =
media::NewRunnableFrom([self, unique_id, aCapEngine, num]() -> nsresult {
webrtc::VideoCaptureCapability webrtcCaps;
int error = -1;
if (auto engine = self->EnsureInitialized(aCapEngine)) {
if (auto devInfo = engine->GetOrCreateVideoCaptureDeviceInfo()){
error = devInfo->GetCapability(unique_id.get(), num, webrtcCaps);
}
+
+ if (!error && aCapEngine == CameraEngine) {
+ auto iter = self->mAllCandidateCapabilities.find(unique_id);
+ if (iter == self->mAllCandidateCapabilities.end()) {
+ std::map<uint32_t, webrtc::VideoCaptureCapability> candidateCapabilities;
+ candidateCapabilities.emplace(num, webrtcCaps);
+ self->mAllCandidateCapabilities.emplace(nsCString(unique_id), candidateCapabilities);
+ } else {
+ (iter->second).emplace(num, webrtcCaps);
+ }
+ }
}
RefPtr<nsIRunnable> ipc_runnable =
media::NewRunnableFrom([self, webrtcCaps, error]() -> nsresult {
if (self->IsShuttingDown()) {
return NS_ERROR_FAILURE;
}
VideoCaptureCapability capCap(webrtcCaps.width,
webrtcCaps.height,
@@ -811,32 +847,81 @@ CamerasParent::RecvStartCapture(const Ca
CallbackHelper** cbh;
VideoEngine* engine = nullptr;
int error = -1;
if (self->EnsureInitialized(aCapEngine)) {
cbh = self->mCallbacks.AppendElement(
new CallbackHelper(static_cast<CaptureEngine>(aCapEngine), capnum, self));
engine = self->sEngines[aCapEngine];
- engine->WithEntry(capnum, [&error, &ipcCaps, &cbh](VideoEngine::CaptureEntry& cap) {
- error = 0;
+ engine->WithEntry(capnum,
+ [&capnum, &aCapEngine, &error, &ipcCaps, &cbh, self]
+ (VideoEngine::CaptureEntry& cap) {
webrtc::VideoCaptureCapability capability;
capability.width = ipcCaps.width();
capability.height = ipcCaps.height();
capability.maxFPS = ipcCaps.maxFPS();
capability.expectedCaptureDelay = ipcCaps.expectedCaptureDelay();
capability.rawType = static_cast<webrtc::RawVideoType>(ipcCaps.rawType());
capability.codecType = static_cast<webrtc::VideoCodecType>(ipcCaps.codecType());
capability.interlaced = ipcCaps.interlaced();
- if (!error) {
- error = cap.VideoCapture()->StartCapture(capability);
+ if (aCapEngine == CameraEngine) {
+#ifdef DEBUG
+ auto deviceUniqueID = sDeviceUniqueIDs.find(capnum);
+ MOZ_ASSERT(deviceUniqueID == sDeviceUniqueIDs.end());
+#endif
+ sDeviceUniqueIDs.emplace(capnum, cap.VideoCapture()->CurrentDeviceName());
+ sAllRequestedCapabilities.emplace(capnum, capability);
+
+ for (const auto &it : sDeviceUniqueIDs) {
+ if (strcmp(it.second, cap.VideoCapture()->CurrentDeviceName()) == 0) {
+ capability.width = std::max(
+ capability.width, sAllRequestedCapabilities[it.first].width);
+ capability.height = std::max(
+ capability.height, sAllRequestedCapabilities[it.first].height);
+ capability.maxFPS = std::max(
+ capability.maxFPS, sAllRequestedCapabilities[it.first].maxFPS);
+ }
+ }
+
+ auto candidateCapabilities = self->mAllCandidateCapabilities.find(
+ nsCString(cap.VideoCapture()->CurrentDeviceName()));
+ MOZ_ASSERT(candidateCapabilities != self->mAllCandidateCapabilities.end());
+ MOZ_ASSERT(candidateCapabilities->second.size() > 0);
+ int32_t minIdx = -1;
+ uint64_t minDistance = UINT64_MAX;
+
+ for (auto & candidateCapability : candidateCapabilities->second) {
+ if (candidateCapability.second.rawType != capability.rawType) {
+ continue;
+ }
+ // The first priority is finding a suitable resolution.
+ // So here we raise the weight of width and height
+ uint64_t distance =
+ uint64_t(ResolutionFeasibilityDistance(
+ candidateCapability.second.width, capability.width)) +
+ uint64_t(ResolutionFeasibilityDistance(
+ candidateCapability.second.height, capability.height)) +
+ uint64_t(FeasibilityDistance(
+ candidateCapability.second.maxFPS, capability.maxFPS));
+ if (distance < minDistance) {
+          minIdx = candidateCapability.first;
+ minDistance = distance;
+ }
+ }
+ MOZ_ASSERT(minIdx != -1);
+ capability = candidateCapabilities->second[minIdx];
}
+
+ error = cap.VideoCapture()->StartCapture(capability);
+
if (!error) {
- cap.VideoCapture()->RegisterCaptureDataCallback(static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(*cbh));
+ cap.VideoCapture()->RegisterCaptureDataCallback(
+ static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(*cbh));
}
});
}
RefPtr<nsIRunnable> ipc_runnable =
media::NewRunnableFrom([self, error]() -> nsresult {
if (self->IsShuttingDown()) {
return NS_ERROR_FAILURE;
}
@@ -861,21 +946,26 @@ CamerasParent::StopCapture(const Capture
{
if (auto engine = EnsureInitialized(aCapEngine)) {
// we're removing elements, iterate backwards
for (size_t i = mCallbacks.Length(); i > 0; i--) {
if (mCallbacks[i - 1]->mCapEngine == aCapEngine &&
mCallbacks[i - 1]->mStreamId == (uint32_t)capnum) {
CallbackHelper* cbh = mCallbacks[i-1];
- engine->WithEntry(capnum,[cbh](VideoEngine::CaptureEntry& cap) {
+ engine->WithEntry(capnum,[cbh, &capnum, &aCapEngine](VideoEngine::CaptureEntry& cap){
if (cap.VideoCapture()) {
cap.VideoCapture()->DeRegisterCaptureDataCallback(
static_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(cbh));
cap.VideoCapture()->StopCaptureIfAllClientsClose();
+
+ if (aCapEngine == CameraEngine) {
+ sDeviceUniqueIDs.erase(capnum);
+ sAllRequestedCapabilities.erase(capnum);
+ }
}
});
delete mCallbacks[i - 1];
mCallbacks.RemoveElementAt(i - 1);
break;
}
}
--- a/dom/media/systemservices/CamerasParent.h
+++ b/dom/media/systemservices/CamerasParent.h
@@ -159,16 +159,18 @@ protected:
// Shutdown handling
bool mChildIsAlive;
bool mDestroyed;
// Above 2 are PBackground only, but this is potentially
// read cross-thread.
mozilla::Atomic<bool> mWebRTCAlive;
RefPtr<InputObserver> mCameraObserver;
+ std::map<nsCString, std::map<uint32_t, webrtc::VideoCaptureCapability>>
+ mAllCandidateCapabilities;
};
PCamerasParent* CreateCamerasParent();
} // namespace camera
} // namespace mozilla
#endif // mozilla_CameraParent_h