Bug 1014393 - Update EncodedFrame class to more closely resemble MediaData class.
This changes EncodedFrame to behave more like MediaData, so that EncodedFrame
can be used with the MediaQueue data structure.
MozReview-Commit-ID: IDmUXkQ31gI
--- a/dom/media/encoder/EncodedFrameContainer.h
+++ b/dom/media/encoder/EncodedFrameContainer.h
@@ -2,16 +2,17 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrameContainer_H_
#define EncodedFrameContainer_H_
#include "nsTArray.h"
+#include "VideoUtils.h"
namespace mozilla {
class EncodedFrame;
/*
* This container is used to carry video or audio encoded data from encoder to muxer.
* The media data object is created by encoder and recycle by the destructor.
@@ -37,17 +38,17 @@ private:
};
// Represent one encoded frame
class EncodedFrame final
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
public:
EncodedFrame() :
- mTimeStamp(0),
+ mTime(0),
mDuration(0),
mFrameType(UNKNOWN)
{}
enum FrameType {
VP8_I_FRAME, // VP8 intraframe
VP8_P_FRAME, // VP8 predicted frame
OPUS_AUDIO_FRAME, // Opus audio frame
VORBIS_AUDIO_FRAME,
@@ -76,34 +77,36 @@ public:
return NS_OK;
}
return NS_ERROR_FAILURE;
}
const nsTArray<uint8_t>& GetFrameData() const
{
return mFrameData;
}
- uint64_t GetTimeStamp() const { return mTimeStamp; }
- void SetTimeStamp(uint64_t aTimeStamp) { mTimeStamp = aTimeStamp; }
+ // Timestamp in microseconds
+ uint64_t mTime;
+ // Playback duration: microseconds for video frames, number of samples (at 48kHz) for Opus audio frames
+ uint64_t mDuration;
+ // Represent what is in the FrameData
+ FrameType mFrameType;
- uint64_t GetDuration() const { return mDuration; }
- void SetDuration(uint64_t aDuration) { mDuration = aDuration; }
-
- FrameType GetFrameType() const { return mFrameType; }
- void SetFrameType(FrameType aFrameType) { mFrameType = aFrameType; }
+ uint64_t GetEndTime() const {
+ if (mFrameType == OPUS_AUDIO_FRAME) {
+ // TODO: fix this up pending bug 1356054 (can remove videoutils import when this goes)
+ return mTime + FramesToUsecs(mDuration, 48000).value();
+ } else {
+ return mTime + mDuration;
+ }
+ }
private:
// Private destructor, to discourage deletion outside of Release():
~EncodedFrame()
{
}
// Encoded data
nsTArray<uint8_t> mFrameData;
- uint64_t mTimeStamp;
- // The playback duration of this packet in number of samples
- uint64_t mDuration;
- // Represent what is in the FrameData
- FrameType mFrameType;
};
} // namespace mozilla
#endif
--- a/dom/media/encoder/OpusTrackEncoder.cpp
+++ b/dom/media/encoder/OpusTrackEncoder.cpp
@@ -367,17 +367,17 @@ OpusTrackEncoder::GetEncodedTrack(Encode
}
// Possible greatest value of framesToFetch = 3844: see
// https://bugzilla.mozilla.org/show_bug.cgi?id=1349421#c8. frameCopied
// should not be able to exceed this value.
MOZ_ASSERT(frameCopied <= 3844, "frameCopied exceeded expected range");
RefPtr<EncodedFrame> audiodata = new EncodedFrame();
- audiodata->SetFrameType(EncodedFrame::OPUS_AUDIO_FRAME);
+ audiodata->mFrameType = EncodedFrame::OPUS_AUDIO_FRAME;
int framesInPCM = frameCopied;
if (mResampler) {
AutoTArray<AudioDataValue, 9600> resamplingDest;
// We want to consume all the input data, so we slightly oversize the
// resampled data buffer so we can fit the output data in. We cannot really
// predict the output frame count at each call.
uint32_t outframes = frameCopied * kOpusSamplingRate / mSamplingRate + 1;
uint32_t inframes = frameCopied;
@@ -409,20 +409,20 @@ OpusTrackEncoder::GetEncodedTrack(Encode
resamplingDest.Elements(), outframesToCopy * mChannels);
int frameLeftover = outframes - outframesToCopy;
mResampledLeftover.SetLength(frameLeftover * mChannels);
PodCopy(mResampledLeftover.Elements(),
resamplingDest.Elements() + outframesToCopy * mChannels,
mResampledLeftover.Length());
// This is always at 48000Hz.
framesInPCM = framesLeft + outframesToCopy;
- audiodata->SetDuration(framesInPCM);
+ audiodata->mDuration = framesInPCM;
} else {
// The ogg time stamping and pre-skip is always timed at 48000.
- audiodata->SetDuration(frameCopied * (kOpusSamplingRate / mSamplingRate));
+ audiodata->mDuration = frameCopied * (kOpusSamplingRate / mSamplingRate);
}
// Remove the raw data which has been pulled to pcm buffer.
// The value of frameCopied should equal to (or smaller than, if eos)
// GetPacketDuration().
mSourceSegment.RemoveLeading(frameCopied);
// Has reached the end of input stream and all queued data has pulled for
@@ -464,17 +464,17 @@ OpusTrackEncoder::GetEncodedTrack(Encode
speex_resampler_destroy(mResampler);
mResampler = nullptr;
}
mResampledLeftover.SetLength(0);
}
audiodata->SwapInFrameData(frameData);
// timestamp should be the time of the first sample
- audiodata->SetTimeStamp(mOutputTimeStamp);
+ audiodata->mTime = mOutputTimeStamp;
mOutputTimeStamp += FramesToUsecs(GetPacketDuration(), kOpusSamplingRate).value();
LOG("[Opus] mOutputTimeStamp %lld.",mOutputTimeStamp);
aData.AppendEncodedFrame(audiodata);
}
done:
return result >= 0 ? NS_OK : NS_ERROR_FAILURE;
}
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -269,25 +269,25 @@ VP8TrackEncoder::GetEncodedPartitions(En
}
break;
}
}
if (!frameData.IsEmpty()) {
// Copy the encoded data to aData.
EncodedFrame* videoData = new EncodedFrame();
- videoData->SetFrameType(frameType);
+ videoData->mFrameType = frameType;
// Convert the timestamp and duration to Usecs.
CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
if (!timestamp.isValid()) {
NS_ERROR("Microsecond timestamp overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
- videoData->SetTimeStamp((uint64_t)timestamp.value());
+ videoData->mTime = (uint64_t)timestamp.value();
mExtractedDuration += pkt->data.frame.duration;
if (!mExtractedDuration.isValid()) {
NS_ERROR("Duration overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
CheckedInt64 totalDuration =
@@ -299,22 +299,22 @@ VP8TrackEncoder::GetEncodedPartitions(En
CheckedInt64 duration = totalDuration - mExtractedDurationUs;
if (!duration.isValid()) {
NS_ERROR("Duration overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
mExtractedDurationUs = totalDuration;
- videoData->SetDuration((uint64_t)duration.value());
+ videoData->mDuration = (uint64_t)duration.value();
videoData->SwapInFrameData(frameData);
VP8LOG(LogLevel::Verbose,
"GetEncodedPartitions TimeStamp %" PRIu64 ", Duration %" PRIu64 ", FrameType %d",
- videoData->GetTimeStamp(), videoData->GetDuration(),
- videoData->GetFrameType());
+ videoData->mTime, videoData->mDuration,
+ videoData->mFrameType);
aData.AppendEncodedFrame(videoData);
}
return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
}
static bool isYUV420(const PlanarYCbCrImage::Data *aData)
{
@@ -665,18 +665,18 @@ VP8TrackEncoder::GetEncodedTrack(Encoded
CheckedInt64 totalDuration = FramesToUsecs(mExtractedDuration.value(), mTrackRate);
CheckedInt64 skippedDuration = totalDuration - mExtractedDurationUs;
mExtractedDurationUs = totalDuration;
if (!skippedDuration.isValid()) {
NS_ERROR("skipped duration overflow");
return NS_ERROR_DOM_MEDIA_OVERFLOW_ERR;
}
- last->SetDuration(last->GetDuration() +
- (static_cast<uint64_t>(skippedDuration.value())));
+ last->mDuration = last->mDuration +
+ (static_cast<uint64_t>(skippedDuration.value()));
}
}
// Move forward the mEncodedTimestamp.
mEncodedTimestamp += chunk.GetDuration();
totalProcessedDuration += chunk.GetDuration();
// Check what to do next.
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -323,21 +323,21 @@ TEST(VP8VideoTrackEncoder, SingleFrameEn
EXPECT_TRUE(encoder.IsEncodingComplete());
// Read out encoded data, and verify.
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
const size_t oneElement = 1;
ASSERT_EQ(oneElement, frames.Length());
- EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->GetFrameType()) <<
+ EXPECT_EQ(EncodedFrame::VP8_I_FRAME, frames[0]->mFrameType) <<
"We only have one frame, so it should be a keyframe";
const uint64_t halfSecond = PR_USEC_PER_SEC / 2;
- EXPECT_EQ(halfSecond, frames[0]->GetDuration());
+ EXPECT_EQ(halfSecond, frames[0]->mDuration);
}
// Test that encoding a couple of identical images gives useful output.
TEST(VP8VideoTrackEncoder, SameFrameEncode)
{
// Initiate VP8 encoder
TestVP8TrackEncoder encoder;
InitParam param = {true, 640, 480};
@@ -367,17 +367,17 @@ TEST(VP8VideoTrackEncoder, SameFrameEnco
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1.5s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
- totalDuration += frame->GetDuration();
+ totalDuration += frame->mDuration;
}
const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
EXPECT_EQ(oneAndAHalf, totalDuration);
}
// Test encoding a track that starts with null data
TEST(VP8VideoTrackEncoder, NullFrameFirst)
{
@@ -418,17 +418,17 @@ TEST(VP8VideoTrackEncoder, NullFrameFirs
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 0.3s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
- totalDuration += frame->GetDuration();
+ totalDuration += frame->mDuration;
}
const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
EXPECT_EQ(pointThree, totalDuration);
}
// Test encoding a track that has to skip frames.
TEST(VP8VideoTrackEncoder, SkippedFrames)
{
@@ -461,17 +461,17 @@ TEST(VP8VideoTrackEncoder, SkippedFrames
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 100 * 1ms = 100ms.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
- totalDuration += frame->GetDuration();
+ totalDuration += frame->mDuration;
}
const uint64_t hundredMillis = PR_USEC_PER_SEC / 10;
EXPECT_EQ(hundredMillis, totalDuration);
}
// Test encoding a track with frames subject to rounding errors.
TEST(VP8VideoTrackEncoder, RoundingErrorFramesEncode)
{
@@ -513,17 +513,17 @@ TEST(VP8VideoTrackEncoder, RoundingError
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
- totalDuration += frame->GetDuration();
+ totalDuration += frame->mDuration;
}
const uint64_t oneSecond= PR_USEC_PER_SEC;
EXPECT_EQ(oneSecond, totalDuration);
}
// EOS test
TEST(VP8VideoTrackEncoder, EncodeComplete)
{
--- a/dom/media/gtest/TestWebMWriter.cpp
+++ b/dom/media/gtest/TestWebMWriter.cpp
@@ -73,19 +73,19 @@ public:
// previous cluster so that we can retrieve data by |GetContainerData|.
void AppendDummyFrame(EncodedFrame::FrameType aFrameType,
uint64_t aDuration) {
EncodedFrameContainer encodedVideoData;
nsTArray<uint8_t> frameData;
RefPtr<EncodedFrame> videoData = new EncodedFrame();
// Create dummy frame data.
frameData.SetLength(FIXED_FRAMESIZE);
- videoData->SetFrameType(aFrameType);
- videoData->SetTimeStamp(mTimestamp);
- videoData->SetDuration(aDuration);
+ videoData->mFrameType = aFrameType;
+ videoData->mTime = mTimestamp;
+ videoData->mDuration = aDuration;
videoData->SwapInFrameData(frameData);
encodedVideoData.AppendEncodedFrame(videoData);
WriteEncodedTrack(encodedVideoData, 0);
mTimestamp += aDuration;
}
bool HaveValidCluster() {
nsTArray<nsTArray<uint8_t> > encodedBuf;
--- a/dom/media/ogg/OggWriter.cpp
+++ b/dom/media/ogg/OggWriter.cpp
@@ -58,24 +58,24 @@ nsresult
OggWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags)
{
PROFILER_LABEL("OggWriter", "WriteEncodedTrack",
js::ProfileEntry::Category::OTHER);
uint32_t len = aData.GetEncodedFrames().Length();
for (uint32_t i = 0; i < len; i++) {
- if (aData.GetEncodedFrames()[i]->GetFrameType() != EncodedFrame::OPUS_AUDIO_FRAME) {
+ if (aData.GetEncodedFrames()[i]->mFrameType != EncodedFrame::OPUS_AUDIO_FRAME) {
LOG("[OggWriter] wrong encoded data type!");
return NS_ERROR_FAILURE;
}
// only pass END_OF_STREAM on the last frame!
nsresult rv = WriteEncodedData(aData.GetEncodedFrames()[i]->GetFrameData(),
- aData.GetEncodedFrames()[i]->GetDuration(),
+ aData.GetEncodedFrames()[i]->mDuration,
i < len-1 ? (aFlags & ~ContainerWriter::END_OF_STREAM) :
aFlags);
if (NS_FAILED(rv)) {
LOG("%p Failed to WriteEncodedTrack!", this);
return rv;
}
}
return NS_OK;
--- a/dom/media/webm/EbmlComposer.cpp
+++ b/dom/media/webm/EbmlComposer.cpp
@@ -119,25 +119,25 @@ void EbmlComposer::FinishCluster()
}
void
EbmlComposer::WriteSimpleBlock(EncodedFrame* aFrame)
{
EbmlGlobal ebml;
ebml.offset = 0;
- auto frameType = aFrame->GetFrameType();
+ auto frameType = aFrame->mFrameType;
bool flush = false;
bool isVP8IFrame = (frameType == EncodedFrame::FrameType::VP8_I_FRAME);
if (isVP8IFrame) {
FinishCluster();
flush = true;
} else {
// Force it to calculate timecode using signed math via cast
- int64_t timeCode = (aFrame->GetTimeStamp() / ((int) PR_USEC_PER_MSEC) - mClusterTimecode) +
+ int64_t timeCode = (aFrame->mTime / ((int) PR_USEC_PER_MSEC) - mClusterTimecode) +
(mCodecDelay / PR_NSEC_PER_MSEC);
if (timeCode < SHRT_MIN || timeCode > SHRT_MAX ) {
// We're probably going to overflow (or underflow) the timeCode value later!
FinishCluster();
flush = true;
}
}
@@ -148,24 +148,24 @@ EbmlComposer::WriteSimpleBlock(EncodedFr
if (flush) {
EbmlLoc ebmlLoc;
Ebml_StartSubElement(&ebml, &ebmlLoc, Cluster);
MOZ_ASSERT(mClusterBuffs.Length() > 0);
// current cluster header array index
mClusterHeaderIndex = mClusterBuffs.Length() - 1;
mClusterLengthLoc = ebmlLoc.offset;
// if timeCode didn't under/overflow before, it shouldn't after this
- mClusterTimecode = aFrame->GetTimeStamp() / PR_USEC_PER_MSEC;
+ mClusterTimecode = aFrame->mTime / PR_USEC_PER_MSEC;
Ebml_SerializeUnsigned(&ebml, Timecode, mClusterTimecode);
mFlushState |= FLUSH_CLUSTER;
}
bool isOpus = (frameType == EncodedFrame::FrameType::OPUS_AUDIO_FRAME);
// Can't underflow/overflow now
- int64_t timeCode = aFrame->GetTimeStamp() / ((int) PR_USEC_PER_MSEC) - mClusterTimecode;
+ int64_t timeCode = aFrame->mTime / ((int) PR_USEC_PER_MSEC) - mClusterTimecode;
if (isOpus) {
timeCode += mCodecDelay / PR_NSEC_PER_MSEC;
}
MOZ_ASSERT(timeCode >= SHRT_MIN && timeCode <= SHRT_MAX);
writeSimpleBlock(&ebml, isOpus ? 0x2 : 0x1, static_cast<short>(timeCode), isVP8IFrame,
0, 0, (unsigned char*)aFrame->GetFrameData().Elements(),
aFrame->GetFrameData().Length());
MOZ_ASSERT(ebml.offset <= DEFAULT_HEADER_SIZE +