Bug 1355048: P8. Implement WebrtcMediaDataDecoderCodec. r?jesup
author Jean-Yves Avenard <jyavenard@mozilla.com>
Sat, 01 Jul 2017 01:51:00 +0200
changeset 609798 7d339dcf86c1a8ec504163d2300cfa6adae3d444
parent 609797 4423d1aec2895b94933ad9ab7ac49ce91303cc02
child 637661 6996cab8614f6f0cb8482480cf76a93400c3a9ec
push id 68676
push user bmo:jyavenard@mozilla.com
push date Mon, 17 Jul 2017 13:51:59 +0000
reviewers jesup
bugs 1355048
milestone 56.0a1
Bug 1355048: P8. Implement WebrtcMediaDataDecoderCodec. r?jesup

This provides the ability to use the PlatformDecoderModule interface for WebRTC video decoding, including hardware-accelerated decoders. The code is disabled by default and is gated by the media.navigator.mediadatadecoder_enabled preference.

MozReview-Commit-ID: 7bWJXEK8CoO
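The patch below queries the pref through MediaPrefs::MediaDataDecoderEnabled(). That accessor is not added in this changeset; here is a minimal sketch of how it could be declared, assuming the DECL_MEDIA_PREF macro pattern already used in dom/media/MediaPrefs.h:

    // Hypothetical declaration in dom/media/MediaPrefs.h (not part of this
    // patch): exposes the pref as MediaPrefs::MediaDataDecoderEnabled(),
    // defaulting to false so the new decoder path stays disabled.
    DECL_MEDIA_PREF("media.navigator.mediadatadecoder_enabled",
                    MediaDataDecoderEnabled, bool, false);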
media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.cpp
media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.h
media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.cpp
media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.h
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.cpp
@@ -1,23 +1,37 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaDataDecoderCodec.h"
+#include "MediaPrefs.h"
+#include "WebrtcMediaDataDecoderCodec.h"
 
 namespace mozilla {
 
 /* static */ WebrtcVideoEncoder*
 MediaDataDecoderCodec::CreateEncoder(
   webrtc::VideoCodecType aCodecType)
 {
   return nullptr;
 }
 
 /* static */ WebrtcVideoDecoder*
 MediaDataDecoderCodec::CreateDecoder(
-  webrtc::VideoCodecType aCodecbType)
+  webrtc::VideoCodecType aCodecType)
 {
-  return nullptr;
+  if (!MediaPrefs::MediaDataDecoderEnabled()) {
+    return nullptr;
+  }
+
+  switch (aCodecType) {
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+    case webrtc::VideoCodecType::kVideoCodecH264:
+      break;
+    default:
+      return nullptr;
+  }
+  return new WebrtcMediaDataDecoder();
 }
 
 } // namespace mozilla
\ No newline at end of file
--- a/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.h
@@ -2,20 +2,21 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MEDIA_DATA_DECODER_CODEC_H_
 #define MEDIA_DATA_DECODER_CODEC_H_
 
 #include "MediaConduitInterface.h"
 #include "webrtc/common_types.h"
-#include "webrtc/video_decoder.h"
 
 namespace mozilla {
 
+class WebrtcVideoDecoder;
+class WebrtcVideoEncoder;
 class MediaDataDecoderCodec
 {
  public:
   /**
    * Create encoder object for codec type |aCodecType|. Return |nullptr| when
    * failed.
    */
   static WebrtcVideoEncoder* CreateEncoder(
--- a/media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.cpp
@@ -1,51 +1,255 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "WebrtcMediaDataDecoderCodec.h"
-#include "PlatformDecoderModule.h"
+#include "ImageContainer.h"
+#include "Layers.h"
+#include "PDMFactory.h"
+#include "VideoUtils.h"
+#include "mozilla/layers/ImageBridgeChild.h"
+#include "webrtc/base/keep_ref_until_done.h"
 
 namespace mozilla {
 
-class MediaDataDecoder;
-
 WebrtcMediaDataDecoder::WebrtcMediaDataDecoder()
+  : mTaskQueue(
+      new TaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
+                    "WebrtcMediaDataDecoder::mTaskQueue"))
+  , mImageContainer(layers::LayerManager::CreateImageContainer(
+      layers::ImageContainer::ASYNCHRONOUS))
+  , mFactory(new PDMFactory())
+  , mMonitor("WebrtcMediaDataDecoder")
 {
 }
 
 WebrtcMediaDataDecoder::~WebrtcMediaDataDecoder()
 {
+  mTaskQueue->BeginShutdown();
+  mTaskQueue->AwaitShutdownAndIdle();
 }
 
 int32_t
-WebrtcMediaDataDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
-                                   int32_t numberOfCores)
+WebrtcMediaDataDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings,
+                                   int32_t aNumberOfCores)
 {
-  return 0;
+  nsCString codec;
+  switch (aCodecSettings->codecType) {
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+      codec = "video/webm; codecs=vp8";
+      break;
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+      codec = "video/webm; codecs=vp9";
+      break;
+    case webrtc::VideoCodecType::kVideoCodecH264:
+      codec = "video/avc";
+      break;
+    default:
+      return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  mTrackType = TrackInfo::kVideoTrack;
+
+  mInfo = VideoInfo(aCodecSettings->width, aCodecSettings->height);
+  mInfo.mMimeType = codec;
+
+  RefPtr<layers::KnowsCompositor> knowsCompositor =
+    layers::ImageBridgeChild::GetSingleton();
+
+  mDecoder = mFactory->CreateDecoder(
+    { mInfo,
+      mTaskQueue,
+      CreateDecoderParams::OptionSet(CreateDecoderParams::Option::LowLatency),
+      mTrackType,
+      mImageContainer,
+      knowsCompositor });
+
+  if (!mDecoder) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  MonitorAutoLock lock(mMonitor);
+  bool done = false;
+  mDecoder->Init()->Then(mTaskQueue,
+                         __func__,
+                         [&](TrackInfo::TrackType) {
+                           MonitorAutoLock lock(mMonitor);
+                           done = true;
+                           mMonitor.Notify();
+                         },
+                         [&](const MediaResult& aError) {
+                           MonitorAutoLock lock(mMonitor);
+                           done = true;
+                           mError = aError;
+                           mMonitor.Notify();
+                         });
+
+  while (!done) {
+    mMonitor.Wait();
+  }
+
+  return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
 }
 
 int32_t
 WebrtcMediaDataDecoder::Decode(
-  const webrtc::EncodedImage& inputImage,
-  bool missingFrames,
-  const webrtc::RTPFragmentationHeader* fragmentation,
-  const webrtc::CodecSpecificInfo* codecSpecificInfo,
-  int64_t renderTimeMs)
+  const webrtc::EncodedImage& aInputImage,
+  bool aMissingFrames,
+  const webrtc::RTPFragmentationHeader* aFragmentation,
+  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
+  int64_t aRenderTimeMs)
 {
-  return 0;
+  if (!mCallback || !mDecoder) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  if (!aInputImage._buffer || !aInputImage._length) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  // Always start with a complete key frame.
+  if (mNeedKeyframe) {
+    if (aInputImage._frameType != webrtc::FrameType::kVideoFrameKey) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    // We have a key frame - is it complete?
+    if (aInputImage._completeFrame) {
+      mNeedKeyframe = false;
+    } else {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+
+  RefPtr<MediaRawData> compressedFrame =
+    new MediaRawData(aInputImage._buffer, aInputImage._length);
+  if (!compressedFrame->Data()) {
+    return WEBRTC_VIDEO_CODEC_MEMORY;
+  }
+
+  compressedFrame->mTime =
+    media::TimeUnit::FromMicroseconds(aInputImage._timeStamp);
+  compressedFrame->mTimecode =
+    media::TimeUnit::FromMicroseconds(aRenderTimeMs * 1000);
+  compressedFrame->mKeyframe =
+    aInputImage._frameType == webrtc::FrameType::kVideoFrameKey;
+  {
+    MonitorAutoLock lock(mMonitor);
+    bool done = false;
+    mDecoder->Decode(compressedFrame)->Then(
+      mTaskQueue,
+      __func__,
+      [&](const MediaDataDecoder::DecodedData& aResults) {
+        MonitorAutoLock lock(mMonitor);
+        mResults = aResults;
+        done = true;
+        mMonitor.Notify();
+      },
+      [&](const MediaResult& aError) {
+        MonitorAutoLock lock(mMonitor);
+        mError = aError;
+        done = true;
+        mMonitor.Notify();
+      });
+
+    while (!done) {
+      mMonitor.Wait();
+    }
+
+    for (auto& frame : mResults) {
+      MOZ_ASSERT(frame->mType == MediaData::VIDEO_DATA);
+      RefPtr<VideoData> video = frame->As<VideoData>();
+      MOZ_ASSERT(video);
+      if (!video->mImage) {
+        // Nothing to display.
+        continue;
+      }
+      rtc::scoped_refptr<ImageBuffer> image(
+        new rtc::RefCountedObject<ImageBuffer>(Move(video->mImage)));
+
+      webrtc::VideoFrame videoFrame(image,
+                                    frame->mTime.ToMicroseconds(),
+                                    frame->mDuration.ToMicroseconds() * 1000,
+                                    aInputImage.rotation_);
+      mCallback->Decoded(videoFrame);
+    }
+    mResults.Clear();
+  }
+  return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK
+                              : WEBRTC_VIDEO_CODEC_ERROR;
 }
 
 int32_t
 WebrtcMediaDataDecoder::RegisterDecodeCompleteCallback(
-  webrtc::DecodedImageCallback* callback)
+  webrtc::DecodedImageCallback* aCallback)
 {
-  return 0;
+  mCallback = aCallback;
+  return WEBRTC_VIDEO_CODEC_OK;
 }
 
 int32_t
 WebrtcMediaDataDecoder::Release()
 {
-  return 0;
+  if (!mDecoder) {
+    // Never initialized, or already released.
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+
+  MonitorAutoLock lock(mMonitor);
+  bool done = false;
+  mDecoder->Flush()
+    ->Then(mTaskQueue,
+           __func__,
+           [this]() { return mDecoder->Shutdown(); },
+           [this](const MediaResult& aError) { return mDecoder->Shutdown(); })
+    ->Then(mTaskQueue,
+           __func__,
+           [&]() {
+             MonitorAutoLock lock(mMonitor);
+             done = true;
+             mMonitor.Notify();
+           },
+           []() { MOZ_ASSERT_UNREACHABLE("Shutdown promise always resolved"); });
+
+  while (!done) {
+    mMonitor.Wait();
+  }
+
+  mDecoder = nullptr;
+  mNeedKeyframe = true;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool
+WebrtcMediaDataDecoder::OnTaskQueue() const
+{
+  return OwnerThread()->IsCurrentThreadIn();
+}
+
+ImageBuffer::ImageBuffer(RefPtr<layers::Image>&& aImage)
+  : webrtc::NativeHandleBuffer(aImage,
+                               aImage->GetSize().width,
+                               aImage->GetSize().height)
+  , mImage(Move(aImage))
+{
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+ImageBuffer::NativeToI420Buffer()
+{
+  RefPtr<layers::PlanarYCbCrImage> image = mImage->AsPlanarYCbCrImage();
+  if (!image) {
+    // TODO: Implement a YUV420 readback; Image currently only provides an
+    // RGB readback.
+    return nullptr;
+  }
+  rtc::scoped_refptr<layers::PlanarYCbCrImage> refImage(image);
+  const layers::PlanarYCbCrData* data = image->GetData();
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buf(
+    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+      data->mPicSize.width,
+      data->mPicSize.height,
+      data->mYChannel,
+      data->mYStride,
+      data->mCbChannel,
+      data->mCbCrStride,
+      data->mCrChannel,
+      data->mCbCrStride,
+      rtc::KeepRefUntilDone(refImage)));
+  return buf;
 }
 
 } // namespace mozilla
--- a/media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.h
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcMediaDataDecoderCodec.h
@@ -1,44 +1,83 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef WebrtcMediaDataDecoderCodec_h__
 #define WebrtcMediaDataDecoderCodec_h__
 
 #include "MediaConduitInterface.h"
-#include "mozilla/RefPtr.h"
-
+#include "MediaInfo.h"
+#include "MediaResult.h"
+#include "PlatformDecoderModule.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 
+namespace webrtc {
+  class DecodedImageCallback;
+}
 namespace mozilla {
+namespace layers {
+  class Image;
+  class ImageContainer;
+}
 
-class MediaDataDecoder;
+class PDMFactory;
+class TaskQueue;
+
+class ImageBuffer : public webrtc::NativeHandleBuffer
+{
+public:
+  explicit ImageBuffer(RefPtr<layers::Image>&& aImage);
+  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+private:
+  RefPtr<layers::Image> mImage;
+};
 
 class WebrtcMediaDataDecoder : public WebrtcVideoDecoder
 {
 public:
   WebrtcMediaDataDecoder();
 
-  virtual ~WebrtcMediaDataDecoder();
-
   // Implement VideoDecoder interface.
   uint64_t PluginID() const override { return 0; }
 
   int32_t InitDecode(const webrtc::VideoCodec* codecSettings,
                      int32_t numberOfCores) override;
 
   int32_t Decode(const webrtc::EncodedImage& inputImage,
                  bool missingFrames,
                  const webrtc::RTPFragmentationHeader* fragmentation,
                  const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
                  int64_t renderTimeMs = -1) override;
 
   int32_t RegisterDecodeCompleteCallback(
     webrtc::DecodedImageCallback* callback) override;
 
   int32_t Release() override;
+
+private:
+  ~WebrtcMediaDataDecoder();
+  void QueueFrame(MediaRawData* aFrame);
+  AbstractThread* OwnerThread() const { return mTaskQueue; }
+  bool OnTaskQueue() const;
+
+  const RefPtr<TaskQueue> mTaskQueue;
+  const RefPtr<layers::ImageContainer> mImageContainer;
+  const RefPtr<PDMFactory> mFactory;
+  RefPtr<MediaDataDecoder> mDecoder;
+  webrtc::DecodedImageCallback* mCallback = nullptr;
+  VideoInfo mInfo;
+  TrackInfo::TrackType mTrackType;
+  bool mNeedKeyframe = true;
+  MozPromiseRequestHolder<MediaDataDecoder::DecodePromise> mDecodeRequest;
+
+  Monitor mMonitor;
+  // Members below are protected by mMonitor.
+  MediaResult mError = NS_OK;
+  MediaDataDecoder::DecodedData mResults;
 };
 
 } // namespace mozilla
 
 #endif // WebrtcMediaDataDecoderCodec_h__
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -2193,16 +2193,25 @@ public:
     width_ = width;
     height_ = height;
   }
 
   void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
                         uint32_t time_stamp,
                         int64_t render_time)
   {
+    if (buffer.native_handle()) {
+      // We assume that only native handles are used with the
+      // WebrtcMediaDataDecoderCodec decoder.
+      RefPtr<Image> image = static_cast<Image*>(buffer.native_handle());
+      MutexAutoLock lock(mutex_);
+      image_ = image;
+      return;
+    }
+
     MOZ_ASSERT(buffer.DataY());
     // Create a video frame using |buffer|.
     RefPtr<PlanarYCbCrImage> yuvImage =
       image_container_->CreatePlanarYCbCrImage();
 
     PlanarYCbCrData yuvData;
     yuvData.mYChannel = const_cast<uint8_t*>(buffer.DataY());
     yuvData.mYSize = IntSize(buffer.width(), buffer.height());