Bug 1344649 - part 1: let VideoData::CreateFromImage() accept only necessary parameters. r?jya draft
authorJohn Lin <jolin@mozilla.com>
Tue, 07 Mar 2017 19:04:51 +0800
changeset 495035 32e0577407be5d7288d38c9b7f2ab8c8cd71c898
parent 495034 2cce2d8d634f9a02fc8f2af6ac06442081ee4f04
child 495036 11910ff3cf8bd699aa7f591dfc575ad2a96fb81b
push id 48207
push user bmo:jolin@mozilla.com
push date Wed, 08 Mar 2017 06:04:43 +0000
reviewers jya
bugs 1344649
milestone55.0a1
Bug 1344649 - part 1: let VideoData::CreateFromImage() accept only necessary parameters. r?jya VideoData doesn't care what's in aInfo but display size, and aPicture is unused. MozReview-Commit-ID: IBqq8Rm8dM4
dom/media/MediaData.cpp
dom/media/MediaData.h
dom/media/android/AndroidMediaReader.cpp
dom/media/ipc/VideoDecoderChild.cpp
dom/media/platforms/android/RemoteDataDecoder.cpp
dom/media/platforms/apple/AppleVTDecoder.cpp
dom/media/platforms/wmf/WMFVideoMFTManager.cpp
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -428,31 +428,30 @@ VideoData::CreateAndCopyData(const Video
                       argb_buffer, size.width * 4,
                       size.width, size.height);
 
   return v.forget();
 }
 
 /* static */
 already_AddRefed<VideoData>
-VideoData::CreateFromImage(const VideoInfo& aInfo,
+VideoData::CreateFromImage(const IntSize& aDisplay,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            const RefPtr<Image>& aImage,
                            bool aKeyframe,
-                           int64_t aTimecode,
-                           const IntRect& aPicture)
+                           int64_t aTimecode)
 {
   RefPtr<VideoData> v(new VideoData(aOffset,
                                     aTime,
                                     aDuration,
                                     aKeyframe,
                                     aTimecode,
-                                    aInfo.mDisplay,
+                                    aDisplay,
                                     0));
   v->mImage = aImage;
   return v.forget();
 }
 
 MediaRawData::MediaRawData()
   : MediaData(RAW_DATA, 0)
   , mCrypto(mCryptoInternal)
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -513,24 +513,23 @@ public:
     int64_t aTime,
     int64_t aDuration,
     layers::TextureClient* aBuffer,
     bool aKeyframe,
     int64_t aTimecode,
     const IntRect& aPicture);
 
   static already_AddRefed<VideoData> CreateFromImage(
-    const VideoInfo& aInfo,
+    const IntSize& aDisplay,
     int64_t aOffset,
     int64_t aTime,
     int64_t aDuration,
     const RefPtr<Image>& aImage,
     bool aKeyframe,
-    int64_t aTimecode,
-    const IntRect& aPicture);
+    int64_t aTimecode);
 
   // Initialize PlanarYCbCrImage. Only When aCopyData is true,
   // video data is copied to PlanarYCbCrImage.
   static bool SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                   const VideoInfo& aInfo,
                                   const YCbCrBuffer& aBuffer,
                                   const IntRect& aPicture,
                                   bool aCopyData);
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -167,36 +167,23 @@ bool AndroidMediaReader::DecodeVideoFram
       return true;
 
     currentImage = bufferCallback.GetImage();
     int64_t pos = mDecoder->GetResource()->Tell();
     IntRect picture = mPicture;
 
     RefPtr<VideoData> v;
     if (currentImage) {
-      gfx::IntSize frameSize = currentImage->GetSize();
-      if (frameSize.width != mInitialFrame.width ||
-          frameSize.height != mInitialFrame.height) {
-        // Frame size is different from what the container reports. This is legal,
-        // and we will preserve the ratio of the crop rectangle as it
-        // was reported relative to the picture size reported by the container.
-        picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
-        picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
-        picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
-        picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
-      }
-
-      v = VideoData::CreateFromImage(mInfo.mVideo,
+      v = VideoData::CreateFromImage(mInfo.mVideo.mDisplay,
                                      pos,
                                      frame.mTimeUs,
                                      1, // We don't know the duration yet.
                                      currentImage,
                                      frame.mKeyFrame,
-                                     -1,
-                                     picture);
+                                     -1);
     } else {
       // Assume YUV
       VideoData::YCbCrBuffer b;
       b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
       b.mPlanes[0].mStride = frame.Y.mStride;
       b.mPlanes[0].mHeight = frame.Y.mHeight;
       b.mPlanes[0].mWidth = frame.Y.mWidth;
       b.mPlanes[0].mOffset = frame.Y.mOffset;
--- a/dom/media/ipc/VideoDecoderChild.cpp
+++ b/dom/media/ipc/VideoDecoderChild.cpp
@@ -34,31 +34,29 @@ VideoDecoderChild::~VideoDecoderChild()
   AssertOnManagerThread();
   mInitPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
 }
 
 mozilla::ipc::IPCResult
 VideoDecoderChild::RecvOutput(const VideoDataIPDL& aData)
 {
   AssertOnManagerThread();
-  VideoInfo info(aData.display().width, aData.display().height);
 
   // The Image here creates a TextureData object that takes ownership
   // of the SurfaceDescriptor, and is responsible for making sure that
   // it gets deallocated.
   RefPtr<Image> image = new GPUVideoImage(GetManager(), aData.sd(), aData.frameSize());
 
-  RefPtr<VideoData> video = VideoData::CreateFromImage(info,
+  RefPtr<VideoData> video = VideoData::CreateFromImage(aData.display(),
                                                        aData.base().offset(),
                                                        aData.base().time(),
                                                        aData.base().duration(),
                                                        image,
                                                        aData.base().keyframe(),
-                                                       aData.base().timecode(),
-                                                       IntRect());
+                                                       aData.base().timecode());
   mDecodedData.AppendElement(Move(video));
   return IPC_OK();
 }
 
 mozilla::ipc::IPCResult
 VideoDecoderChild::RecvInputExhausted()
 {
   AssertOnManagerThread();
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -177,25 +177,23 @@ public:
           && !isEOS) {
         return;
       }
 
       if (size > 0) {
         MutexAutoLock lock(mDecoder->mMutex);
 
         RefPtr<layers::Image> img = new SurfaceTextureImage(
-          mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mDisplay,
+          mDecoder->mSurfaceTexture.get(), mDecoder->mConfig.mImage,
           gl::OriginPos::BottomLeft);
 
         RefPtr<VideoData> v = VideoData::CreateFromImage(
-          mDecoder->mConfig, offset, presentationTimeUs, durationUs,
+          mDecoder->mConfig.mDisplay, offset, presentationTimeUs, durationUs,
           img, !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
-          presentationTimeUs,
-          gfx::IntRect(0, 0, mDecoder->mConfig.mDisplay.width,
-                       mDecoder->mConfig.mDisplay.height));
+          presentationTimeUs);
 
         v->SetListener(Move(releaseSample));
 
         mDecoder->Output(v);
       }
 
       if (isEOS) {
         mDecoder->DrainComplete();
--- a/dom/media/platforms/apple/AppleVTDecoder.cpp
+++ b/dom/media/platforms/apple/AppleVTDecoder.cpp
@@ -423,24 +423,23 @@ AppleVTDecoder::OutputFrame(CVPixelBuffe
     IOSurfacePtr surface = MacIOSurfaceLib::CVPixelBufferGetIOSurface(aImage);
     MOZ_ASSERT(surface, "Decoder didn't return an IOSurface backed buffer");
 
     RefPtr<MacIOSurface> macSurface = new MacIOSurface(surface);
 
     RefPtr<layers::Image> image = new MacIOSurfaceImage(macSurface);
 
     data =
-      VideoData::CreateFromImage(info,
+      VideoData::CreateFromImage(info.mDisplay,
                                  aFrameRef.byte_offset,
                                  aFrameRef.composition_timestamp.ToMicroseconds(),
                                  aFrameRef.duration.ToMicroseconds(),
                                  image.forget(),
                                  aFrameRef.is_sync_point,
-                                 aFrameRef.decode_timestamp.ToMicroseconds(),
-                                 visible);
+                                 aFrameRef.decode_timestamp.ToMicroseconds());
 #else
     MOZ_ASSERT_UNREACHABLE("No MacIOSurface on iOS");
 #endif
   }
 
   if (!data) {
     NS_ERROR("Couldn't create VideoData for frame");
     MonitorAutoLock mon(mMonitor);
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -872,24 +872,23 @@ WMFVideoMFTManager::CreateBasicVideoFram
 
   VideoData::SetVideoDataToImage(image,
                                  mVideoInfo,
                                  b,
                                  pictureRegion,
                                  false);
 
   RefPtr<VideoData> v =
-    VideoData::CreateFromImage(mVideoInfo,
+    VideoData::CreateFromImage(mVideoInfo.mDisplay,
                                aStreamOffset,
                                pts.ToMicroseconds(),
                                duration.ToMicroseconds(),
                                image.forget(),
                                false,
-                               -1,
-                               pictureRegion);
+                               -1);
 
   v.forget(aOutVideoData);
   return S_OK;
 }
 
 HRESULT
 WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
                                         int64_t aStreamOffset,
@@ -911,24 +910,23 @@ WMFVideoMFTManager::CreateD3DVideoFrame(
                                   getter_AddRefs(image));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
   NS_ENSURE_TRUE(image, E_FAIL);
 
   media::TimeUnit pts = GetSampleTime(aSample);
   NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
   media::TimeUnit duration = GetSampleDuration(aSample);
   NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
-  RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo,
+  RefPtr<VideoData> v = VideoData::CreateFromImage(mVideoInfo.mDisplay,
                                                    aStreamOffset,
                                                    pts.ToMicroseconds(),
                                                    duration.ToMicroseconds(),
                                                    image.forget(),
                                                    false,
-                                                   -1,
-                                                   pictureRegion);
+                                                   -1);
 
   NS_ENSURE_TRUE(v, E_FAIL);
   v.forget(aOutVideoData);
 
   return S_OK;
 }
 
 // Blocks until decoded sample is produced by the deoder.