Bug 1243611 - When EOS, call vpx_codec_encode correctly. r=rillian
MozReview-Commit-ID: IzrDAOD11r9
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -162,17 +162,17 @@ VP8TrackEncoder::GetMetadata()
meta->mHeight = mFrameHeight;
meta->mDisplayWidth = mDisplayWidth;
meta->mDisplayHeight = mDisplayHeight;
meta->mEncodedFrameRate = mEncodedFrameRate;
return meta.forget();
}
-nsresult
+bool
VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData)
{
vpx_codec_iter_t iter = nullptr;
EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
nsTArray<uint8_t> frameData;
const vpx_codec_cx_pkt_t *pkt = nullptr;
while ((pkt = vpx_codec_get_cx_data(mVPXContext, &iter)) != nullptr) {
switch (pkt->kind) {
@@ -190,40 +190,37 @@ VP8TrackEncoder::GetEncodedPartitions(En
if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
frameType = EncodedFrame::VP8_I_FRAME;
}
break;
}
}
- if (!frameData.IsEmpty() &&
- (pkt->data.frame.pts == mEncodedTimestamp)) {
+ if (!frameData.IsEmpty()) {
// Copy the encoded data to aData.
EncodedFrame* videoData = new EncodedFrame();
videoData->SetFrameType(frameType);
// Convert the timestamp and duration to Usecs.
- CheckedInt64 timestamp = FramesToUsecs(mEncodedTimestamp, mTrackRate);
+ CheckedInt64 timestamp = FramesToUsecs(pkt->data.frame.pts, mTrackRate);
if (timestamp.isValid()) {
- videoData->SetTimeStamp(
- (uint64_t)FramesToUsecs(mEncodedTimestamp, mTrackRate).value());
+ videoData->SetTimeStamp((uint64_t)timestamp.value());
}
CheckedInt64 duration = FramesToUsecs(pkt->data.frame.duration, mTrackRate);
if (duration.isValid()) {
- videoData->SetDuration(
- (uint64_t)FramesToUsecs(pkt->data.frame.duration, mTrackRate).value());
+ videoData->SetDuration((uint64_t)duration.value());
}
videoData->SwapInFrameData(frameData);
VP8LOG("GetEncodedPartitions TimeStamp %lld Duration %lld\n",
videoData->GetTimeStamp(), videoData->GetDuration());
VP8LOG("frameType %d\n", videoData->GetFrameType());
aData.AppendEncodedFrame(videoData);
}
- return NS_OK;
+ return !!pkt;
}
static bool isYUV420(const PlanarYCbCrImage::Data *aData)
{
if (aData->mYSize == aData->mCbCrSize * 2) {
return true;
}
return false;
@@ -358,17 +355,17 @@ nsresult VP8TrackEncoder::PrepareRawFram
return NS_ERROR_NOT_IMPLEMENTED;
}
if (rv != 0) {
VP8LOG("Converting an %s frame to I420 failed\n", yuvFormat.c_str());
return NS_ERROR_FAILURE;
}
- VP8LOG("Converted an %s frame to I420\n");
+ VP8LOG("Converted an %s frame to I420\n", yuvFormat.c_str());
} else {
// Not YCbCr at all. Try to get access to the raw data and convert.
RefPtr<SourceSurface> surf = img->GetAsSourceSurface();
if (!surf) {
VP8LOG("Getting surface from %s image failed\n", Stringify(format).c_str());
return NS_ERROR_FAILURE;
}
@@ -624,19 +621,23 @@ VP8TrackEncoder::GetEncodedTrack(Encoded
// Remove the chunks we have processed.
mSourceSegment.RemoveLeading(totalProcessedDuration);
VP8LOG("RemoveLeading %lld\n",totalProcessedDuration);
// End of stream, pull the rest frames in encoder.
if (EOS) {
VP8LOG("mEndOfStream is true\n");
mEncodingComplete = true;
- if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
- mEncodedFrameDuration, 0, VPX_DL_REALTIME)) {
- return NS_ERROR_FAILURE;
- }
- GetEncodedPartitions(aData);
+ // Bug 1243611: keep calling vpx_codec_encode and vpx_codec_get_cx_data
+ // until vpx_codec_get_cx_data returns null.
+
+ do {
+ if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
+ mEncodedFrameDuration, 0, VPX_DL_REALTIME)) {
+ return NS_ERROR_FAILURE;
+ }
+ } while(GetEncodedPartitions(aData));
}
return NS_OK ;
}
} // namespace mozilla
--- a/dom/media/encoder/VP8TrackEncoder.h
+++ b/dom/media/encoder/VP8TrackEncoder.h
@@ -49,17 +49,19 @@ private:
StreamTime CalculateRemainingTicks(StreamTime aDurationCopied,
StreamTime aEncodedDuration);
// Get the EncodeOperation for next target frame.
EncodeOperation GetNextEncodeOperation(TimeDuration aTimeElapsed,
StreamTime aProcessedDuration);
// Get the encoded data from encoder to aData.
- nsresult GetEncodedPartitions(EncodedFrameContainer& aData);
+ // Return value: false if vpx_codec_get_cx_data returned null,
+ // which is used for EOS detection.
+ bool GetEncodedPartitions(EncodedFrameContainer& aData);
// Prepare the input data to the mVPXImageWrapper for encoding.
nsresult PrepareRawFrame(VideoChunk &aChunk);
// Output frame rate.
uint32_t mEncodedFrameRate;
// Duration for the output frame, reciprocal to mEncodedFrameRate.
StreamTime mEncodedFrameDuration;