Bug 1014393 - Update EncodedFrame class to more closely resemble MediaData class. r?pehrsons draft
authorBryce Van Dyk <bvandyk@mozilla.com>
Mon, 13 Nov 2017 15:08:32 -0500
changeset 698383 fe81b3a0a69efbd18b2cd64eb5d6c6b6902fbe61
parent 698382 7e26b6073a842357dad65b1b71ec565031235011
child 698384 4506fc66b5a9cbfda5c1c4066c085c40d31c9520
push id89276
push userbvandyk@mozilla.com
push dateWed, 15 Nov 2017 17:29:54 +0000
reviewerspehrsons
bugs1014393
milestone58.0a1
Bug 1014393 - Update EncodedFrame class to more closely resemble MediaData class. r?pehrsons This changes EncodedFrame to behave more like MediaData, so that EncodedFrame can be used with the MediaQueue data structure. It also provides a somewhat more consistent interface across media data types. MozReview-Commit-ID: I2o6n30ErxB
dom/media/encoder/EncodedFrameContainer.h
dom/media/encoder/MediaEncoder.cpp
dom/media/encoder/OpusTrackEncoder.cpp
dom/media/encoder/VP8TrackEncoder.cpp
dom/media/gtest/TestAudioTrackEncoder.cpp
dom/media/gtest/TestVideoTrackEncoder.cpp
dom/media/gtest/TestWebMWriter.cpp
dom/media/ogg/OggWriter.cpp
dom/media/webm/EbmlComposer.cpp
--- a/dom/media/encoder/EncodedFrameContainer.h
+++ b/dom/media/encoder/EncodedFrameContainer.h
@@ -2,16 +2,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef EncodedFrameContainer_H_
 #define EncodedFrameContainer_H_
 
 #include "nsTArray.h"
+#include "VideoUtils.h"
 
 namespace mozilla {
 
 class EncodedFrame;
 
 /*
  * This container is used to carry video or audio encoded data from encoder to muxer.
  * The media data object is created by encoder and recycle by the destructor.
@@ -36,20 +37,20 @@ private:
   nsTArray<RefPtr<EncodedFrame> > mEncodedFrames;
 };
 
 // Represent one encoded frame
 class EncodedFrame final
 {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
 public:
-  EncodedFrame() :
-    mTimeStamp(0),
-    mDuration(0),
-    mFrameType(UNKNOWN)
+  EncodedFrame()
+    : mTime(0)
+    , mDuration(0)
+    , mFrameType(UNKNOWN)
   {}
   enum FrameType {
     VP8_I_FRAME,      // VP8 intraframe
     VP8_P_FRAME,      // VP8 predicted frame
     OPUS_AUDIO_FRAME, // Opus audio frame
     VORBIS_AUDIO_FRAME,
     AVC_I_FRAME,
     AVC_P_FRAME,
@@ -76,34 +77,43 @@ public:
       return NS_OK;
     }
     return NS_ERROR_FAILURE;
   }
   const nsTArray<uint8_t>& GetFrameData() const
   {
     return mFrameData;
   }
-  uint64_t GetTimeStamp() const { return mTimeStamp; }
-  void SetTimeStamp(uint64_t aTimeStamp) { mTimeStamp = aTimeStamp; }
+  // Timestamp in microseconds
+  uint64_t mTime;
+  // The playback duration of this packet: samples for audio (48 kHz for Opus), microseconds for video
+  uint64_t mDuration;
+  // Represent what is in the FrameData
+  FrameType mFrameType;
 
-  uint64_t GetDuration() const { return mDuration; }
-  void SetDuration(uint64_t aDuration) { mDuration = aDuration; }
+  uint64_t GetEndTime() const
+  {
+    // Defend against untested types. This assert can be removed but we want
+    // to make sure other types are correctly accounted for.
+    MOZ_ASSERT(mFrameType == OPUS_AUDIO_FRAME || mFrameType == VP8_I_FRAME ||
+               mFrameType == VP8_P_FRAME);
+    if (mFrameType == OPUS_AUDIO_FRAME) {
+      // See bug 1356054 for discussion around standardization of time units
+      // (the VideoUtils.h include can be removed once that lands)
+      return mTime + FramesToUsecs(mDuration, 48000).value();
+    } else {
+      return mTime + mDuration;
+    }
+  }
 
-  FrameType GetFrameType() const { return mFrameType; }
-  void SetFrameType(FrameType aFrameType) { mFrameType = aFrameType; }
 private:
   // Private destructor, to discourage deletion outside of Release():
   ~EncodedFrame()
   {
   }
 
   // Encoded data
   nsTArray<uint8_t> mFrameData;
-  uint64_t mTimeStamp;
-  // The playback duration of this packet in number of samples
-  uint64_t mDuration;
-  // Represent what is in the FrameData
-  FrameType mFrameType;
 };
 
 } // namespace mozilla
 
 #endif
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -862,18 +862,18 @@ MediaEncoder::EncodeData()
     rv = mAudioEncoder->GetEncodedTrack(encodedAudioData);
     if (NS_FAILED(rv)) {
       // Encoding might be canceled.
       LOG(LogLevel::Error, ("Failed to get encoded data from audio encoder."));
       return rv;
     }
     for (const RefPtr<EncodedFrame>& frame :
          encodedAudioData.GetEncodedFrames()) {
-      if (frame->GetFrameType() == EncodedFrame::FrameType::OPUS_AUDIO_FRAME) {
-        frame->SetTimeStamp(frame->GetTimeStamp() + mAudioCodecDelay);
+      if (frame->mFrameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME) {
+        frame->mTime += mAudioCodecDelay;
       }
       mEncodedAudioFrames.AppendElement(frame);
     }
   }
 
   return rv;
 }
 
--- a/dom/media/encoder/OpusTrackEncoder.cpp
+++ b/dom/media/encoder/OpusTrackEncoder.cpp
@@ -338,17 +338,17 @@ OpusTrackEncoder::GetEncodedTrack(Encode
     }
 
     // Possible greatest value of framesToFetch = 3844: see
     // https://bugzilla.mozilla.org/show_bug.cgi?id=1349421#c8. frameCopied
     // should not be able to exceed this value.
     MOZ_ASSERT(frameCopied <= 3844, "frameCopied exceeded expected range");
 
     RefPtr<EncodedFrame> audiodata = new EncodedFrame();
-    audiodata->SetFrameType(EncodedFrame::OPUS_AUDIO_FRAME);
+    audiodata->mFrameType = EncodedFrame::OPUS_AUDIO_FRAME;
     int framesInPCM = frameCopied;
     if (mResampler) {
       AutoTArray<AudioDataValue, 9600> resamplingDest;
       // We want to consume all the input data, so we slightly oversize the
       // resampled data buffer so we can fit the output data in. We cannot really
       // predict the output frame count at each call.
       uint32_t outframes = frameCopied * kOpusSamplingRate / mSamplingRate + 1;
       uint32_t inframes = frameCopied;
@@ -380,20 +380,20 @@ OpusTrackEncoder::GetEncodedTrack(Encode
               resamplingDest.Elements(), outframesToCopy * mChannels);
       int frameLeftover = outframes - outframesToCopy;
       mResampledLeftover.SetLength(frameLeftover * mChannels);
       PodCopy(mResampledLeftover.Elements(),
               resamplingDest.Elements() + outframesToCopy * mChannels,
               mResampledLeftover.Length());
       // This is always at 48000Hz.
       framesInPCM = framesLeft + outframesToCopy;
-      audiodata->SetDuration(framesInPCM);
+      audiodata->mDuration = framesInPCM;
     } else {
       // The ogg time stamping and pre-skip is always timed at 48000.
-      audiodata->SetDuration(frameCopied * (kOpusSamplingRate / mSamplingRate));
+      audiodata->mDuration = frameCopied * (kOpusSamplingRate / mSamplingRate);
     }
 
     // Remove the raw data which has been pulled to pcm buffer.
     // The value of frameCopied should equal to (or smaller than, if eos)
     // GetPacketDuration().
     mSourceSegment.RemoveLeading(frameCopied);
 
     // Has reached the end of input stream and all queued data has pulled for
@@ -435,17 +435,17 @@ OpusTrackEncoder::GetEncodedTrack(Encode
         speex_resampler_destroy(mResampler);
         mResampler = nullptr;
       }
       mResampledLeftover.SetLength(0);
     }
 
     audiodata->SwapInFrameData(frameData);
     // timestamp should be the time of the first sample
-    audiodata->SetTimeStamp(mOutputTimeStamp);
+    audiodata->mTime = mOutputTimeStamp;
     mOutputTimeStamp += FramesToUsecs(GetPacketDuration(), kOpusSamplingRate).value();
     LOG("[Opus] mOutputTimeStamp %lld.",mOutputTimeStamp);
     aData.AppendEncodedFrame(audiodata);
   }
 
   return result >= 0 ? NS_OK : NS_ERROR_FAILURE;
 }
 
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -269,25 +269,25 @@ VP8TrackEncoder::GetEncodedPartitions(En
       }
       break;
     }
   }
 
   if (!frameData.IsEmpty()) {
     // Copy the encoded data to aData.
     EncodedFrame* videoData = new EncodedFrame();
-    videoData->SetFrameType(frameType);
+    videoData->mFrameType = frameType;
 
     // Convert the timestamp and duration to Usecs.
     CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
     if (!timestamp.isValid()) {
       NS_ERROR("Microsecond timestamp overflow");
       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     }
-    videoData->SetTimeStamp((uint64_t)timestamp.value());
+    videoData->mTime = (uint64_t)timestamp.value();
 
     mExtractedDuration += pkt->data.frame.duration;
     if (!mExtractedDuration.isValid()) {
       NS_ERROR("Duration overflow");
       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     }
 
     CheckedInt64 totalDuration =
@@ -299,22 +299,24 @@ VP8TrackEncoder::GetEncodedPartitions(En
 
     CheckedInt64 duration = totalDuration - mExtractedDurationUs;
     if (!duration.isValid()) {
       NS_ERROR("Duration overflow");
       return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
     }
 
     mExtractedDurationUs = totalDuration;
-    videoData->SetDuration((uint64_t)duration.value());
+    videoData->mDuration = (uint64_t)duration.value();
     videoData->SwapInFrameData(frameData);
     VP8LOG(LogLevel::Verbose,
-           "GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64 ", FrameType %d",
-           videoData->GetTimeStamp(), videoData->GetDuration(),
-           videoData->GetFrameType());
+           "GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64
+           ", FrameType %d",
+           videoData->mTime,
+           videoData->mDuration,
+           videoData->mFrameType);
     aData.AppendEncodedFrame(videoData);
   }
 
   return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
 }
 
 nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
 {
@@ -673,18 +675,18 @@ VP8TrackEncoder::GetEncodedTrack(Encoded
 
         CheckedInt64 totalDuration = FramesToUsecs(mExtractedDuration.value(), mTrackRate);
         CheckedInt64 skippedDuration = totalDuration - mExtractedDurationUs;
         mExtractedDurationUs = totalDuration;
         if (!skippedDuration.isValid()) {
           NS_ERROR("skipped duration overflow");
           return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
         }
-        last->SetDuration(last->GetDuration() +
-                          (static_cast<uint64_t>(skippedDuration.value())));
+        last->mDuration =
+          last->mDuration + (static_cast<uint64_t>(skippedDuration.value()));
       }
     }
 
     // Move forward the mEncodedTimestamp.
     mEncodedTimestamp += chunk.GetDuration();
     totalProcessedDuration += chunk.GetDuration();
 
     // Check what to do next.
--- a/dom/media/gtest/TestAudioTrackEncoder.cpp
+++ b/dom/media/gtest/TestAudioTrackEncoder.cpp
@@ -236,14 +236,14 @@ TEST(OpusAudioTrackEncoder, FrameEncode)
   encoder.AdvanceCurrentTime(samples);
 
   EncodedFrameContainer container;
   EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   // Verify that encoded data is 5 seconds long.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   // 44100 as used above gets resampled to 48000 for opus.
   const uint64_t five = 48000 * 5;
   EXPECT_EQ(five, totalDuration);
 }
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -320,21 +320,21 @@ TEST(VP8VideoTrackEncoder, SingleFrameEn
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Read out encoded data, and verify.
   const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
   const size_t oneElement = 1;
   ASSERT_EQ(oneElement, frames.Length());
 
-  EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType()) <<
-    "We only have one frame, so it should be a keyframe";
+  EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType)
+    << "We only have one frame, so it should be a keyframe";
 
   const uint64_t halfSecond = PR_USEC_PER_SEC / 2;
-  EXPECT_EQ(halfSecond, frames[0]->GetDuration());
+  EXPECT_EQ(halfSecond, frames[0]->mDuration);
 }
 
 // Test that encoding a couple of identical images gives useful output.
 TEST(VP8VideoTrackEncoder, SameFrameEncode)
 {
   TestVP8TrackEncoder encoder;
 
   // Pass 15 100ms frames to the encoder.
@@ -360,17 +360,17 @@ TEST(VP8VideoTrackEncoder, SameFrameEnco
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 1.5s.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
   EXPECT_EQ(oneAndAHalf, totalDuration);
 }
 
 // Test encoding a track that starts with null data
 TEST(VP8VideoTrackEncoder, NullFrameFirst)
 {
@@ -407,17 +407,17 @@ TEST(VP8VideoTrackEncoder, NullFrameFirs
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 0.3s.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
   EXPECT_EQ(pointThree, totalDuration);
 }
 
 // Test encoding a track that has to skip frames.
 TEST(VP8VideoTrackEncoder, SkippedFrames)
 {
@@ -446,17 +446,17 @@ TEST(VP8VideoTrackEncoder, SkippedFrames
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 100 * 1ms = 100ms.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t hundredMillis = PR_USEC_PER_SEC / 10;
   EXPECT_EQ(hundredMillis, totalDuration);
 }
 
 // Test encoding a track with frames subject to rounding errors.
 TEST(VP8VideoTrackEncoder, RoundingErrorFramesEncode)
 {
@@ -494,17 +494,17 @@ TEST(VP8VideoTrackEncoder, RoundingError
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 1s.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t oneSecond= PR_USEC_PER_SEC;
   EXPECT_EQ(oneSecond, totalDuration);
 }
 
 // Test that we're encoding timestamps rather than durations.
 TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
 {
@@ -547,18 +547,18 @@ TEST(VP8VideoTrackEncoder, TimestampFram
 
   // Verify total duration being 4s and individual frames being [0.5s, 1.5s, 1s, 1s]
   uint64_t expectedDurations[] = { (PR_USEC_PER_SEC / 10) / 2,
                                    (PR_USEC_PER_SEC / 10) * 3 / 2,
                                    (PR_USEC_PER_SEC / 10)};
   uint64_t totalDuration = 0;
   size_t i = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
-    totalDuration += frame->GetDuration();
+    EXPECT_EQ(expectedDurations[i++], frame->mDuration);
+    totalDuration += frame->mDuration;
   }
   const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
   EXPECT_EQ(pointThree, totalDuration);
 }
 
 // Test that suspending an encoding works.
 TEST(VP8VideoTrackEncoder, Suspended)
 {
@@ -611,17 +611,17 @@ TEST(VP8VideoTrackEncoder, Suspended)
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify that we have two encoded frames and a total duration of 0.2s.
   const uint64_t two = 2;
   EXPECT_EQ(two, container.GetEncodedFrames().Length());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t pointTwo = (PR_USEC_PER_SEC / 10) * 2;
   EXPECT_EQ(pointTwo, totalDuration);
 }
 
 // Test that ending a track while the video track encoder is suspended works.
 TEST(VP8VideoTrackEncoder, SuspendedUntilEnd)
 {
@@ -662,17 +662,17 @@ TEST(VP8VideoTrackEncoder, SuspendedUnti
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify that we have one encoded frames and a total duration of 0.1s.
   const uint64_t one = 1;
   EXPECT_EQ(one, container.GetEncodedFrames().Length());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t pointOne = PR_USEC_PER_SEC / 10;
   EXPECT_EQ(pointOne, totalDuration);
 }
 
 // Test that ending a track that was always suspended works.
 TEST(VP8VideoTrackEncoder, AlwaysSuspended)
 {
@@ -752,17 +752,17 @@ TEST(VP8VideoTrackEncoder, SuspendedBegi
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify that we have one encoded frames and a total duration of 0.1s.
   const uint64_t one = 1;
   EXPECT_EQ(one, container.GetEncodedFrames().Length());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
 // Test that suspending and resuming in the middle of already pushed data
 // works.
 TEST(VP8VideoTrackEncoder, SuspendedOverlap)
@@ -807,17 +807,17 @@ TEST(VP8VideoTrackEncoder, SuspendedOver
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify that we have two encoded frames and a total duration of 0.1s.
   const uint64_t two= 2;
   EXPECT_EQ(two, container.GetEncodedFrames().Length());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t onePointTwo = (PR_USEC_PER_SEC / 10) * 12;
   EXPECT_EQ(onePointTwo, totalDuration);
 }
 
 // Test that ending a track in the middle of already pushed data works.
 TEST(VP8VideoTrackEncoder, PrematureEnding)
 {
@@ -842,17 +842,17 @@ TEST(VP8VideoTrackEncoder, PrematureEndi
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
 // Test that a track that starts at t > 0 works as expected.
 TEST(VP8VideoTrackEncoder, DelayedStart)
 {
@@ -878,17 +878,17 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
 // Test that a track that starts at t > 0 works as expected, when
 // SetStartOffset comes after AppendVideoSegment.
 TEST(VP8VideoTrackEncoder, DelayedStartOtherEventOrder)
@@ -915,17 +915,17 @@ TEST(VP8VideoTrackEncoder, DelayedStartO
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
 // Test that a track that starts at t >>> 0 works as expected.
 TEST(VP8VideoTrackEncoder, VeryDelayedStart)
 {
@@ -951,17 +951,17 @@ TEST(VP8VideoTrackEncoder, VeryDelayedSt
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
+    totalDuration += frame->mDuration;
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
 // EOS test
 TEST(VP8VideoTrackEncoder, EncodeComplete)
 {
--- a/dom/media/gtest/TestWebMWriter.cpp
+++ b/dom/media/gtest/TestWebMWriter.cpp
@@ -75,19 +75,19 @@ public:
   // previous cluster so that we can retrieve data by |GetContainerData|.
   void AppendDummyFrame(EncodedFrame::FrameType aFrameType,
                         uint64_t aDuration) {
     EncodedFrameContainer encodedVideoData;
     nsTArray<uint8_t> frameData;
     RefPtr<EncodedFrame> videoData = new EncodedFrame();
     // Create dummy frame data.
     frameData.SetLength(FIXED_FRAMESIZE);
-    videoData->SetFrameType(aFrameType);
-    videoData->SetTimeStamp(mTimestamp);
-    videoData->SetDuration(aDuration);
+    videoData->mFrameType = aFrameType;
+    videoData->mTime = mTimestamp;
+    videoData->mDuration = aDuration;
     videoData->SwapInFrameData(frameData);
     encodedVideoData.AppendEncodedFrame(videoData);
     WriteEncodedTrack(encodedVideoData, 0);
     mTimestamp += aDuration;
   }
 
   bool HaveValidCluster() {
     nsTArray<nsTArray<uint8_t> > encodedBuf;
--- a/dom/media/ogg/OggWriter.cpp
+++ b/dom/media/ogg/OggWriter.cpp
@@ -52,26 +52,27 @@ OggWriter::Init()
 nsresult
 OggWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
                              uint32_t aFlags)
 {
   AUTO_PROFILER_LABEL("OggWriter::WriteEncodedTrack", OTHER);
 
   uint32_t len = aData.GetEncodedFrames().Length();
   for (uint32_t i = 0; i < len; i++) {
-    if (aData.GetEncodedFrames()[i]->GetFrameType() != EncodedFrame::OPUS_AUDIO_FRAME) {
+    if (aData.GetEncodedFrames()[i]->mFrameType !=
+        EncodedFrame::OPUS_AUDIO_FRAME) {
       LOG("[OggWriter] wrong encoded data type!");
       return NS_ERROR_FAILURE;
     }
 
     // only pass END_OF_STREAM on the last frame!
-    nsresult rv = WriteEncodedData(aData.GetEncodedFrames()[i]->GetFrameData(),
-                                   aData.GetEncodedFrames()[i]->GetDuration(),
-                                   i < len-1 ? (aFlags & ~ContainerWriter::END_OF_STREAM) :
-                                   aFlags);
+    nsresult rv = WriteEncodedData(
+      aData.GetEncodedFrames()[i]->GetFrameData(),
+      aData.GetEncodedFrames()[i]->mDuration,
+      i < len - 1 ? (aFlags & ~ContainerWriter::END_OF_STREAM) : aFlags);
     if (NS_FAILED(rv)) {
       LOG("%p Failed to WriteEncodedTrack!", this);
       return rv;
     }
   }
   return NS_OK;
 }
 
--- a/dom/media/webm/EbmlComposer.cpp
+++ b/dom/media/webm/EbmlComposer.cpp
@@ -125,26 +125,26 @@ void EbmlComposer::FinishCluster()
 }
 
 void
 EbmlComposer::WriteSimpleBlock(EncodedFrame* aFrame)
 {
   EbmlGlobal ebml;
   ebml.offset = 0;
 
-  auto frameType = aFrame->GetFrameType();
+  auto frameType = aFrame->mFrameType;
   bool flush = false;
   bool isVP8IFrame = (frameType == EncodedFrame::FrameType::VP8_I_FRAME);
   if (isVP8IFrame) {
     FinishCluster();
     flush = true;
   } else {
     // Force it to calculate timecode using signed math via cast
     int64_t timeCode =
-      (aFrame->GetTimeStamp() / ((int)PR_USEC_PER_MSEC) - mClusterTimecode);
+      (aFrame->mTime / ((int)PR_USEC_PER_MSEC) - mClusterTimecode);
     if (timeCode < SHRT_MIN || timeCode > SHRT_MAX ) {
       // We're probably going to overflow (or underflow) the timeCode value later!
       FinishCluster();
       flush = true;
     }
   }
 
   auto block = mClusterBuffs.AppendElement();
@@ -154,24 +154,24 @@ EbmlComposer::WriteSimpleBlock(EncodedFr
   if (flush) {
     EbmlLoc ebmlLoc;
     Ebml_StartSubElement(&ebml, &ebmlLoc, Cluster);
     MOZ_ASSERT(mClusterBuffs.Length() > 0);
     // current cluster header array index
     mClusterHeaderIndex = mClusterBuffs.Length() - 1;
     mClusterLengthLoc = ebmlLoc.offset;
     // if timeCode didn't under/overflow before, it shouldn't after this
-    mClusterTimecode = aFrame->GetTimeStamp() / PR_USEC_PER_MSEC;
+    mClusterTimecode = aFrame->mTime / PR_USEC_PER_MSEC;
     Ebml_SerializeUnsigned(&ebml, Timecode, mClusterTimecode);
     mFlushState |= FLUSH_CLUSTER;
   }
 
   bool isOpus = (frameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME);
   // Can't underflow/overflow now
-  int64_t timeCode = aFrame->GetTimeStamp() / ((int) PR_USEC_PER_MSEC) - mClusterTimecode;
+  int64_t timeCode = aFrame->mTime / ((int)PR_USEC_PER_MSEC) - mClusterTimecode;
   MOZ_ASSERT(timeCode >= SHRT_MIN && timeCode <= SHRT_MAX);
   writeSimpleBlock(&ebml, isOpus ? 0x2 : 0x1, static_cast<short>(timeCode), isVP8IFrame,
                    0, 0, (unsigned char*)aFrame->GetFrameData().Elements(),
                    aFrame->GetFrameData().Length());
   MOZ_ASSERT(ebml.offset <= DEFAULT_HEADER_SIZE +
              aFrame->GetFrameData().Length(),
              "write more data > EBML_BUFFER_SIZE");
   block->SetLength(ebml.offset);