Bug 1347439 - part 5: let VideoData::CreateFromImage() accept only necessary parameters. r?jya draft
authorJohn Lin <jolin@mozilla.com>
Thu, 09 Mar 2017 12:06:24 +0800
changeset 498977 89c6bd68b1eb81959c6393082681db44a0aa2c96
parent 498976 a6b858e55d989556ced97e8160d6d744c573ea1e
child 498978 0ada04dc26f98c5cf44eceaef0422dbef9e04505
push id49299
push userbmo:jolin@mozilla.com
push dateWed, 15 Mar 2017 08:25:49 +0000
reviewersjya
bugs1347439, 1344649, 1336431
milestone53.0
Bug 1347439 - part 5: let VideoData::CreateFromImage() accept only necessary parameters. r?jya Cherry-picked from bug 1344649 part 2 for bug 1336431 MozReview-Commit-ID: Bvu1PivfOj4
dom/media/MediaData.cpp
dom/media/MediaData.h
dom/media/android/AndroidMediaReader.cpp
dom/media/ipc/VideoDecoderChild.cpp
dom/media/platforms/android/RemoteDataDecoder.cpp
dom/media/platforms/apple/AppleVTDecoder.cpp
dom/media/platforms/wmf/WMFVideoMFTManager.cpp
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -425,31 +425,30 @@ VideoData::CreateAndCopyData(const Video
                       argb_buffer, size.width * 4,
                       size.width, size.height);
 
   return v.forget();
 }
 
 /* static */
 already_AddRefed<VideoData>
-VideoData::CreateFromImage(const VideoInfo& aInfo,
+VideoData::CreateFromImage(const IntSize& aDisplay,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            const RefPtr<Image>& aImage,
                            bool aKeyframe,
-                           int64_t aTimecode,
-                           const IntRect& aPicture)
+                           int64_t aTimecode)
 {
   RefPtr<VideoData> v(new VideoData(aOffset,
                                     aTime,
                                     aDuration,
                                     aKeyframe,
                                     aTimecode,
-                                    aInfo.mDisplay,
+                                    aDisplay,
                                     0));
   v->mImage = aImage;
   return v.forget();
 }
 
 MediaRawData::MediaRawData()
   : MediaData(RAW_DATA, 0)
   , mCrypto(mCryptoInternal)
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -494,24 +494,23 @@ public:
                                                                     int64_t aOffset,
                                                                     int64_t aTime,
                                                                     int64_t aDuration,
                                                                     layers::TextureClient* aBuffer,
                                                                     bool aKeyframe,
                                                                     int64_t aTimecode,
                                                                     const IntRect& aPicture);
 
-  static already_AddRefed<VideoData> CreateFromImage(const VideoInfo& aInfo,
+  static already_AddRefed<VideoData> CreateFromImage(const IntSize& aDisplay,
                                                      int64_t aOffset,
                                                      int64_t aTime,
                                                      int64_t aDuration,
                                                      const RefPtr<Image>& aImage,
                                                      bool aKeyframe,
-                                                     int64_t aTimecode,
-                                                     const IntRect& aPicture);
+                                                     int64_t aTimecode);
 
   // Initialize PlanarYCbCrImage. Only When aCopyData is true,
   // video data is copied to PlanarYCbCrImage.
   static bool SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                   const VideoInfo& aInfo,
                                   const YCbCrBuffer &aBuffer,
                                   const IntRect& aPicture,
                                   bool aCopyData);
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -167,36 +167,23 @@ bool AndroidMediaReader::DecodeVideoFram
       return true;
 
     currentImage = bufferCallback.GetImage();
     int64_t pos = mDecoder->GetResource()->Tell();
     IntRect picture = mPicture;
 
     RefPtr<VideoData> v;
     if (currentImage) {
-      gfx::IntSize frameSize = currentImage->GetSize();
-      if (frameSize.width != mInitialFrame.width ||
-          frameSize.height != mInitialFrame.height) {
-        // Frame size is different from what the container reports. This is legal,
-        // and we will preserve the ratio of the crop rectangle as it
-        // was reported relative to the picture size reported by the container.
-        picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
-        picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
-        picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
-        picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
-      }
-
-      v = VideoData::CreateFromImage(mInfo.mVideo,
+      v = VideoData::CreateFromImage(mInfo.mVideo.mDisplay,
                                      pos,
                                      frame.mTimeUs,
                                      1, // We don't know the duration yet.
                                      currentImage,
                                      frame.mKeyFrame,
-                                     -1,
-                                     picture);
+                                     -1);
     } else {
       // Assume YUV
       VideoData::YCbCrBuffer b;
       b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
       b.mPlanes[0].mStride = frame.Y.mStride;
       b.mPlanes[0].mHeight = frame.Y.mHeight;
       b.mPlanes[0].mWidth = frame.Y.mWidth;
       b.mPlanes[0].mOffset = frame.Y.mOffset;
--- a/dom/media/ipc/VideoDecoderChild.cpp
+++ b/dom/media/ipc/VideoDecoderChild.cpp
@@ -34,31 +34,29 @@ VideoDecoderChild::~VideoDecoderChild()
   AssertOnManagerThread();
   mInitPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
 }
 
 mozilla::ipc::IPCResult
 VideoDecoderChild::RecvOutput(const VideoDataIPDL& aData)
 {
   AssertOnManagerThread();
-  VideoInfo info(aData.display().width, aData.display().height);
 
   // The Image here creates a TextureData object that takes ownership
   // of the SurfaceDescriptor, and is responsible for making sure that
   // it gets deallocated.
   RefPtr<Image> image = new GPUVideoImage(GetManager(), aData.sd(), aData.frameSize());
 
-  RefPtr<VideoData> video = VideoData::CreateFromImage(info,
+  RefPtr<VideoData> video = VideoData::CreateFromImage(aData.display(),
                                                        aData.base().offset(),
                                                        aData.base().time(),
                                                        aData.base().duration(),
                                                        image,
                                                        aData.base().keyframe(),
-                                                       aData.base().timecode(),
-                                                       IntRect());
+                                                       aData.base().timecode());
   if (mCallback) {
     mCallback->Output(video);
   }
   return IPC_OK();
 }
 
 mozilla::ipc::IPCResult
 VideoDecoderChild::RecvInputExhausted()
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -177,21 +177,19 @@ public:
       }
 
       if (size > 0) {
         RefPtr<layers::Image> img = new SurfaceTextureImage(
           mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mImage,
           gl::OriginPos::BottomLeft);
 
         RefPtr<VideoData> v = VideoData::CreateFromImage(
-          mDecoder->mConfig, offset, presentationTimeUs, durationUs,
+          mDecoder->mConfig.mDisplay, offset, presentationTimeUs, durationUs,
           img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
-          presentationTimeUs,
-          gfx::IntRect(0, 0, mDecoder->mConfig.mDisplay.width,
-                       mDecoder->mConfig.mDisplay.height));
+          presentationTimeUs);
 
         v->SetListener(Move(releaseSample));
 
         mDecoderCallback->Output(v);
       }
 
       if (isEOS) {
         mDecoderCallback->DrainComplete();
@@ -239,17 +237,17 @@ public:
     mJavaDecoder = CodecProxy::Create(mFormat,
                                       mSurfaceTexture->JavaSurface(),
                                       mJavaCallbacks,
                                       mDrmStubId);
     if (mJavaDecoder == nullptr) {
       return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
     }
     mIsCodecSupportAdaptivePlayback = mJavaDecoder->IsAdaptivePlaybackSupported();
-    mInputDurations.Clear();
+    mInputInfos.Clear();
 
     return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
   }
 
   void Flush() override
   {
     mInputDurations.Clear();
     RemoteDataDecoder::Flush();
--- a/dom/media/platforms/apple/AppleVTDecoder.cpp
+++ b/dom/media/platforms/apple/AppleVTDecoder.cpp
@@ -284,20 +284,16 @@ AppleVTDecoder::OutputFrame(CVPixelBuffe
     }
   }
 
   // Where our resulting image will end up.
   RefPtr<MediaData> data;
   // Bounds.
   VideoInfo info;
   info.mDisplay = nsIntSize(mDisplayWidth, mDisplayHeight);
-  gfx::IntRect visible = gfx::IntRect(0,
-                                      0,
-                                      mPictureWidth,
-                                      mPictureHeight);
 
   if (useNullSample) {
     data = new NullData(aFrameRef.byte_offset,
                         aFrameRef.composition_timestamp.ToMicroseconds(),
                         aFrameRef.duration.ToMicroseconds());
   } else if (mUseSoftwareImages) {
     size_t width = CVPixelBufferGetWidth(aImage);
     size_t height = CVPixelBufferGetHeight(aImage);
@@ -335,16 +331,21 @@ AppleVTDecoder::OutputFrame(CVPixelBuffe
     buffer.mPlanes[2].mData =
       static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(aImage, 1));
     buffer.mPlanes[2].mStride = CVPixelBufferGetBytesPerRowOfPlane(aImage, 1);
     buffer.mPlanes[2].mWidth = (width+1) / 2;
     buffer.mPlanes[2].mHeight = (height+1) / 2;
     buffer.mPlanes[2].mOffset = 1;
     buffer.mPlanes[2].mSkip = 1;
 
+    gfx::IntRect visible = gfx::IntRect(0,
+                                        0,
+                                        mPictureWidth,
+                                        mPictureHeight);
+
     // Copy the image data into our own format.
     data =
       VideoData::CreateAndCopyData(info,
                                    mImageContainer,
                                    aFrameRef.byte_offset,
                                    aFrameRef.composition_timestamp.ToMicroseconds(),
                                    aFrameRef.duration.ToMicroseconds(),
                                    buffer,
@@ -358,24 +359,23 @@ AppleVTDecoder::OutputFrame(CVPixelBuffe
     IOSurfacePtr surface = MacIOSurfaceLib::CVPixelBufferGetIOSurface(aImage);
     MOZ_ASSERT(surface, "Decoder didn't return an IOSurface backed buffer");
 
     RefPtr<MacIOSurface> macSurface = new MacIOSurface(surface);
 
     RefPtr<layers::Image> image = new MacIOSurfaceImage(macSurface);
 
     data =
-      VideoData::CreateFromImage(info,
+      VideoData::CreateFromImage(info.mDisplay,
                                  aFrameRef.byte_offset,
                                  aFrameRef.composition_timestamp.ToMicroseconds(),
                                  aFrameRef.duration.ToMicroseconds(),
                                  image.forget(),
                                  aFrameRef.is_sync_point,
-                                 aFrameRef.decode_timestamp.ToMicroseconds(),
-                                 visible);
+                                 aFrameRef.decode_timestamp.ToMicroseconds());
 #else
     MOZ_ASSERT_UNREACHABLE("No MacIOSurface on iOS");
 #endif
   }
 
   if (!data) {
     NS_ERROR("Couldn't create VideoData for frame");
     mCallback->Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -829,24 +829,23 @@ WMFVideoMFTManager::CreateBasicVideoFram
 
   VideoData::SetVideoDataToImage(image,
                                  mVideoInfo,
                                  b,
                                  pictureRegion,
                                  false);
 
   RefPtr<VideoData> v =
-    VideoData::CreateFromImage(mVideoInfo,
+    VideoData::CreateFromImage(mVideoInfo.mDisplay,
                                aStreamOffset,
                                pts.ToMicroseconds(),
                                duration.ToMicroseconds(),
                                image.forget(),
                                false,
-                               -1,
-                               pictureRegion);
+                               -1);
 
   v.forget(aOutVideoData);
   return S_OK;
 }
 
 HRESULT
 WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
                                         int64_t aStreamOffset,
@@ -868,24 +867,23 @@ WMFVideoMFTManager::CreateD3DVideoFrame(
                                   getter_AddRefs(image));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
   NS_ENSURE_TRUE(image, E_FAIL);
 
   media::TimeUnit pts = GetSampleTime(aSample);
   NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
   media::TimeUnit duration = GetSampleDuration(aSample);
   NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
-  RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo,
+  RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo.mDisplay,
                                                    aStreamOffset,
                                                    pts.ToMicroseconds(),
                                                    duration.ToMicroseconds(),
                                                    image.forget(),
                                                    false,
-                                                   -1,
-                                                   pictureRegion);
+                                                   -1);
 
   NS_ENSURE_TRUE(v, E_FAIL);
   v.forget(aOutVideoData);
 
   return S_OK;
 }
 
 // Blocks until decoded sample is produced by the deoder.