--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -290,18 +290,20 @@ class MediaRecorder::Session: public nsI
private:
RefPtr<Session> mSession;
};
// For Ensure recorder has tracks to record.
class TracksAvailableCallback : public OnTracksAvailableCallback
{
public:
- explicit TracksAvailableCallback(Session *aSession)
- : mSession(aSession) {}
+ explicit TracksAvailableCallback(Session *aSession, TrackRate aTrackRate)
+ : mSession(aSession)
+ , mTrackRate(aTrackRate) {}
+
virtual void NotifyTracksAvailable(DOMMediaStream* aStream)
{
if (mSession->mStopIssued) {
return;
}
MOZ_RELEASE_ASSERT(aStream);
mSession->MediaStreamReady(*aStream);
@@ -342,20 +344,21 @@ class MediaRecorder::Session: public nsI
// Check that we may access the tracks' content.
if (!mSession->MediaStreamTracksPrincipalSubsumes()) {
LOG(LogLevel::Warning, ("Session.NotifyTracksAvailable MediaStreamTracks principal check failed"));
mSession->DoSessionEndTask(NS_ERROR_DOM_SECURITY_ERR);
return;
}
LOG(LogLevel::Debug, ("Session.NotifyTracksAvailable track type = (%d)", trackTypes));
- mSession->InitEncoder(trackTypes);
+ mSession->InitEncoder(trackTypes, mTrackRate);
}
private:
RefPtr<Session> mSession;
+ TrackRate mTrackRate;
};
// Main thread task.
// To delete RecordingSession object.
class DestroyRunnable : public Runnable
{
public:
explicit DestroyRunnable(Session* aSession)
: mSession(aSession) {}
@@ -407,16 +410,17 @@ class MediaRecorder::Session: public nsI
public:
Session(MediaRecorder* aRecorder, int32_t aTimeSlice)
: mRecorder(aRecorder)
, mTimeSlice(aTimeSlice)
, mStopIssued(false)
, mIsStartEventFired(false)
, mIsRegisterProfiler(false)
, mNeedSessionEndTask(true)
+ , mSelectedVideoTrackID(TRACK_NONE)
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_COUNT_CTOR(MediaRecorder::Session);
uint32_t maxMem = Preferences::GetUint("media.recorder.max_memory",
MAX_ALLOW_MEMORY_BUFFER);
mEncodedBufferCache = new EncodedBufferCache(maxMem);
mLastBlobTimeStamp = TimeStamp::Now();
@@ -462,43 +466,44 @@ public:
void Start()
{
LOG(LogLevel::Debug, ("Session.Start %p", this));
MOZ_ASSERT(NS_IsMainThread());
// Create a Track Union Stream
MediaStreamGraph* gm = mRecorder->GetSourceMediaStream()->Graph();
+ TrackRate trackRate = gm->GraphRate();
mTrackUnionStream = gm->CreateTrackUnionStream();
MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");
mTrackUnionStream->SetAutofinish(true);
DOMMediaStream* domStream = mRecorder->Stream();
if (domStream) {
// Get the available tracks from the DOMMediaStream.
// The callback will report back tracks that we have to connect to
// mTrackUnionStream and listen to principal changes on.
- TracksAvailableCallback* tracksAvailableCallback = new TracksAvailableCallback(this);
+ TracksAvailableCallback* tracksAvailableCallback = new TracksAvailableCallback(this, trackRate);
domStream->OnTracksAvailable(tracksAvailableCallback);
} else {
// Check that we may access the audio node's content.
if (!AudioNodePrincipalSubsumes()) {
LOG(LogLevel::Warning, ("Session.Start AudioNode principal check failed"));
DoSessionEndTask(NS_ERROR_DOM_SECURITY_ERR);
return;
}
// Bind this Track Union Stream with Source Media.
RefPtr<MediaInputPort> inputPort =
mTrackUnionStream->AllocateInputPort(mRecorder->GetSourceMediaStream());
mInputPorts.AppendElement(inputPort.forget());
MOZ_ASSERT(mInputPorts[mInputPorts.Length()-1]);
// Web Audio node has only audio.
- InitEncoder(ContainerWriter::CREATE_AUDIO_TRACK);
+ InitEncoder(ContainerWriter::CREATE_AUDIO_TRACK, trackRate);
}
}
void Stop()
{
LOG(LogLevel::Debug, ("Session.Stop %p", this));
MOZ_ASSERT(NS_IsMainThread());
mStopIssued = true;
@@ -729,17 +734,17 @@ private:
return false;
}
uint32_t perm = nsIPermissionManager::DENY_ACTION;
pm->TestExactPermissionFromPrincipal(doc->NodePrincipal(), aType, &perm);
return perm == nsIPermissionManager::ALLOW_ACTION;
}
- void InitEncoder(uint8_t aTrackTypes)
+ void InitEncoder(uint8_t aTrackTypes, TrackRate aTrackRate)
{
LOG(LogLevel::Debug, ("Session.InitEncoder %p", this));
MOZ_ASSERT(NS_IsMainThread());
if (!mRecorder) {
LOG(LogLevel::Debug, ("Session.InitEncoder failure, mRecorder is null %p", this));
return;
}
@@ -747,52 +752,64 @@ private:
// At this stage, the API doesn't allow UA to choose the output mimeType format.
// Make sure the application has permission to assign AUDIO_3GPP
if (mRecorder->mMimeType.EqualsLiteral(AUDIO_3GPP) && CheckPermission("audio-capture:3gpp")) {
mEncoder = MediaEncoder::CreateEncoder(NS_LITERAL_STRING(AUDIO_3GPP),
mRecorder->GetAudioBitrate(),
mRecorder->GetVideoBitrate(),
mRecorder->GetBitrate(),
- aTrackTypes);
+ aTrackTypes, aTrackRate);
} else if (mRecorder->mMimeType.EqualsLiteral(AUDIO_3GPP2) && CheckPermission("audio-capture:3gpp2")) {
mEncoder = MediaEncoder::CreateEncoder(NS_LITERAL_STRING(AUDIO_3GPP2),
mRecorder->GetAudioBitrate(),
mRecorder->GetVideoBitrate(),
mRecorder->GetBitrate(),
- aTrackTypes);
+ aTrackTypes, aTrackRate);
} else {
mEncoder = MediaEncoder::CreateEncoder(NS_LITERAL_STRING(""),
mRecorder->GetAudioBitrate(),
mRecorder->GetVideoBitrate(),
mRecorder->GetBitrate(),
- aTrackTypes);
+ aTrackTypes, aTrackRate);
}
if (!mEncoder) {
LOG(LogLevel::Debug, ("Session.InitEncoder !mEncoder %p", this));
DoSessionEndTask(NS_ERROR_ABORT);
return;
}
// Media stream is ready but UA issues a stop method follow by start method.
// The Session::stop would clean the mTrackUnionStream. If the AfterTracksAdded
// comes after stop command, this function would crash.
if (!mTrackUnionStream) {
LOG(LogLevel::Debug, ("Session.InitEncoder !mTrackUnionStream %p", this));
DoSessionEndTask(NS_OK);
return;
}
- mTrackUnionStream->AddListener(mEncoder);
+ mTrackUnionStream->AddListener(mEncoder.get());
+
+ nsTArray<RefPtr<mozilla::dom::VideoStreamTrack>> videoTracks;
+ DOMMediaStream* domStream = mRecorder->Stream();
+ if (domStream) {
+ domStream->GetVideoTracks(videoTracks);
+ if (!videoTracks.IsEmpty()) {
+ // Right now, the MediaRecorder doesn't handle multiple video tracks,
+ // so we just bind to the first video track. Bug 1276928 tracks the
+ // follow-up work.
+ videoTracks[0]->AddDirectListener(mEncoder->GetVideoSink());
+ }
+ }
+
// Try to use direct listeners if possible
- DOMMediaStream* domStream = mRecorder->Stream();
if (domStream && domStream->GetInputStream()) {
mInputStream = domStream->GetInputStream()->AsSourceStream();
if (mInputStream) {
- mInputStream->AddDirectListener(mEncoder);
+ mInputStream->AddDirectListener(mEncoder.get());
mEncoder->SetDirectConnect(true);
}
}
// Create a thread to read encode media data from MediaEncoder.
if (!mReadThread) {
nsresult rv = NS_NewNamedThread("Media_Encoder", getter_AddRefs(mReadThread));
if (NS_FAILED(rv)) {
@@ -842,29 +859,34 @@ private:
MOZ_ASSERT(false, "NS_DispatchToMainThread DestroyRunnable failed");
}
mNeedSessionEndTask = false;
}
void CleanupStreams()
{
if (mInputStream) {
if (mEncoder) {
- mInputStream->RemoveDirectListener(mEncoder);
+ mInputStream->RemoveDirectListener(mEncoder.get());
}
mInputStream = nullptr;
}
for (RefPtr<MediaInputPort>& inputPort : mInputPorts) {
MOZ_ASSERT(inputPort);
inputPort->Destroy();
}
mInputPorts.Clear();
if (mTrackUnionStream) {
+ // MediaEncoder initialization can fail before we reach this point, so
+ // mEncoder may still be nullptr when |CleanupStreams| runs.
+ if (mEncoder && mSelectedVideoTrackID != TRACK_NONE) {
+ mTrackUnionStream->RemoveVideoOutput(mEncoder->GetVideoSink(), mSelectedVideoTrackID);
+ }
if (mEncoder) {
- mTrackUnionStream->RemoveListener(mEncoder);
+ mTrackUnionStream->RemoveListener(mEncoder.get());
}
mTrackUnionStream->Destroy();
mTrackUnionStream = nullptr;
}
if (mMediaStream) {
mMediaStream->UnregisterTrackListener(this);
mMediaStream = nullptr;
@@ -943,16 +965,17 @@ private:
// Indicate the session had fire start event. Encoding thread only.
bool mIsStartEventFired;
// The register flag for "Media_Encoder" thread to profiler
bool mIsRegisterProfiler;
// False if the InitEncoder called successfully, ensure the
// ExtractRunnable/DestroyRunnable will end the session.
// Main thread only.
bool mNeedSessionEndTask;
+ TrackID mSelectedVideoTrackID;
};
NS_IMPL_ISUPPORTS(MediaRecorder::Session, nsIObserver)
MediaRecorder::~MediaRecorder()
{
if (mPipeStream != nullptr) {
mInputPort->Destroy();
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -29,16 +29,23 @@
#endif
mozilla::LazyLogModule gMediaEncoderLog("MediaEncoder");
#define LOG(type, msg) MOZ_LOG(gMediaEncoderLog, type, msg)
namespace mozilla {
void
+MediaStreamVideoRecorderSink::SetCurrentFrames(const VideoSegment& aSegment)
+{
+ MOZ_ASSERT(mVideoEncoder);
+ mVideoEncoder->SetCurrentFrames(aSegment);
+}
+
+void
MediaEncoder::SetDirectConnect(bool aConnected)
{
mDirectConnected = aConnected;
}
void
MediaEncoder::NotifyRealtimeData(MediaStreamGraph* aGraph,
TrackID aID,
@@ -48,18 +55,19 @@ MediaEncoder::NotifyRealtimeData(MediaSt
{
if (mSuspended == RECORD_NOT_SUSPENDED) {
// Process the incoming raw track data from MediaStreamGraph, called on the
// thread of MediaStreamGraph.
if (mAudioEncoder && aRealtimeMedia.GetType() == MediaSegment::AUDIO) {
mAudioEncoder->NotifyQueuedTrackChanges(aGraph, aID,
aTrackOffset, aTrackEvents,
aRealtimeMedia);
-
- } else if (mVideoEncoder && aRealtimeMedia.GetType() == MediaSegment::VIDEO) {
+ } else if (mVideoEncoder &&
+ aRealtimeMedia.GetType() == MediaSegment::VIDEO &&
+ aTrackEvents != TrackEventCommand::TRACK_EVENT_NONE) {
mVideoEncoder->NotifyQueuedTrackChanges(aGraph, aID,
aTrackOffset, aTrackEvents,
aRealtimeMedia);
}
}
}
void
@@ -136,17 +144,18 @@ MediaEncoder::NotifyEvent(MediaStreamGra
mVideoEncoder->NotifyEvent(aGraph, event);
}
}
/* static */
already_AddRefed<MediaEncoder>
MediaEncoder::CreateEncoder(const nsAString& aMIMEType, uint32_t aAudioBitrate,
uint32_t aVideoBitrate, uint32_t aBitrate,
- uint8_t aTrackTypes)
+ uint8_t aTrackTypes,
+ TrackRate aTrackRate)
{
PROFILER_LABEL("MediaEncoder", "CreateEncoder",
js::ProfileEntry::Category::OTHER);
nsAutoPtr<ContainerWriter> writer;
nsAutoPtr<AudioTrackEncoder> audioEncoder;
nsAutoPtr<VideoTrackEncoder> videoEncoder;
RefPtr<MediaEncoder> encoder;
@@ -159,32 +168,32 @@ MediaEncoder::CreateEncoder(const nsAStr
else if (MediaEncoder::IsWebMEncoderEnabled() &&
(aMIMEType.EqualsLiteral(VIDEO_WEBM) ||
(aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK))) {
if (aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK
&& MediaDecoder::IsOpusEnabled()) {
audioEncoder = new OpusTrackEncoder();
NS_ENSURE_TRUE(audioEncoder, nullptr);
}
- videoEncoder = new VP8TrackEncoder();
+ videoEncoder = new VP8TrackEncoder(aTrackRate);
writer = new WebMWriter(aTrackTypes);
NS_ENSURE_TRUE(writer, nullptr);
NS_ENSURE_TRUE(videoEncoder, nullptr);
mimeType = NS_LITERAL_STRING(VIDEO_WEBM);
}
#endif //MOZ_WEBM_ENCODER
#ifdef MOZ_OMX_ENCODER
else if (MediaEncoder::IsOMXEncoderEnabled() &&
(aMIMEType.EqualsLiteral(VIDEO_MP4) ||
(aTrackTypes & ContainerWriter::CREATE_VIDEO_TRACK))) {
if (aTrackTypes & ContainerWriter::CREATE_AUDIO_TRACK) {
audioEncoder = new OmxAACAudioTrackEncoder();
NS_ENSURE_TRUE(audioEncoder, nullptr);
}
- videoEncoder = new OmxVideoTrackEncoder();
+ videoEncoder = new OmxVideoTrackEncoder(aTrackRate);
writer = new ISOMediaWriter(aTrackTypes);
NS_ENSURE_TRUE(writer, nullptr);
NS_ENSURE_TRUE(videoEncoder, nullptr);
mimeType = NS_LITERAL_STRING(VIDEO_MP4);
} else if (MediaEncoder::IsOMXEncoderEnabled() &&
(aMIMEType.EqualsLiteral(AUDIO_3GPP))) {
audioEncoder = new OmxAMRAudioTrackEncoder();
NS_ENSURE_TRUE(audioEncoder, nullptr);
--- a/dom/media/encoder/MediaEncoder.h
+++ b/dom/media/encoder/MediaEncoder.h
@@ -4,25 +4,42 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MediaEncoder_h_
#define MediaEncoder_h_
#include "mozilla/DebugOnly.h"
#include "TrackEncoder.h"
#include "ContainerWriter.h"
+#include "CubebUtils.h"
#include "MediaStreamGraph.h"
#include "MediaStreamListener.h"
#include "nsAutoPtr.h"
+#include "MediaStreamVideoSink.h"
#include "nsIMemoryReporter.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/Atomics.h"
namespace mozilla {
+class MediaStreamVideoRecorderSink : public MediaStreamVideoSink
+{
+public:
+ explicit MediaStreamVideoRecorderSink(VideoTrackEncoder* aEncoder)
+ : mVideoEncoder(aEncoder) {}
+
+ // MediaStreamVideoSink methods
+ virtual void SetCurrentFrames(const VideoSegment& aSegment) override;
+ virtual void ClearFrames() override {}
+
+private:
+ virtual ~MediaStreamVideoRecorderSink() {}
+ VideoTrackEncoder* mVideoEncoder;
+};
+
/**
* MediaEncoder is the framework of encoding module, it controls and manages
* procedures between ContainerWriter and TrackEncoder. ContainerWriter packs
* the encoded track data with a specific container (e.g. ogg, mp4).
* AudioTrackEncoder and VideoTrackEncoder are subclasses of TrackEncoder, and
* are responsible for encoding raw data coming from MediaStreamGraph.
*
* Also, MediaEncoder is a type of MediaStreamListener, it starts to receive raw
@@ -49,16 +66,17 @@ namespace mozilla {
* 3) To start encoding, add this component to its source stream.
* => sourceStream->AddListener(encoder);
*
* 4) To stop encoding, remove this component from its source stream.
* => sourceStream->RemoveListener(encoder);
*/
class MediaEncoder : public DirectMediaStreamListener
{
+ friend class MediaStreamVideoRecorderSink;
public :
enum {
ENCODE_METADDATA,
ENCODE_TRACK,
ENCODE_DONE,
ENCODE_ERROR,
};
@@ -67,16 +85,17 @@ public :
VideoTrackEncoder* aVideoEncoder,
const nsAString& aMIMEType,
uint32_t aAudioBitrate,
uint32_t aVideoBitrate,
uint32_t aBitrate)
: mWriter(aWriter)
, mAudioEncoder(aAudioEncoder)
, mVideoEncoder(aVideoEncoder)
+ , mVideoSink(new MediaStreamVideoRecorderSink(mVideoEncoder))
, mStartTime(TimeStamp::Now())
, mMIMEType(aMIMEType)
, mSizeOfBuffer(0)
, mState(MediaEncoder::ENCODE_METADDATA)
, mShutdown(false)
, mDirectConnected(false)
, mSuspended(false)
{}
@@ -150,17 +169,18 @@ public :
/**
* Creates an encoder with a given MIME type. Returns null if we are unable
* to create the encoder. For now, default aMIMEType to "audio/ogg" and use
* Ogg+Opus if it is empty.
*/
static already_AddRefed<MediaEncoder> CreateEncoder(const nsAString& aMIMEType,
uint32_t aAudioBitrate, uint32_t aVideoBitrate,
uint32_t aBitrate,
- uint8_t aTrackTypes = ContainerWriter::CREATE_AUDIO_TRACK);
+ uint8_t aTrackTypes = ContainerWriter::CREATE_AUDIO_TRACK,
+ TrackRate aTrackRate = CubebUtils::PreferredSampleRate());
/**
* Encodes the raw track data and returns the final container data. Assuming
* it is called on a single worker thread. The buffer of container data is
* allocated in ContainerWriter::GetContainerData(), and is appended to
* aOutputBufs. aMIMEType is the valid mime-type of this returned container
* data.
*/
void GetEncodedData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
@@ -203,24 +223,29 @@ public :
MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
/*
* Measure the size of the buffer, and memory occupied by mAudioEncoder
* and mVideoEncoder
*/
size_t SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
+ MediaStreamVideoRecorderSink* GetVideoSink() {
+ return mVideoSink.get();
+ }
+
private:
// Get encoded data from trackEncoder and write to muxer
nsresult WriteEncodedDataToMuxer(TrackEncoder *aTrackEncoder);
// Get metadata from trackEncoder and copy to muxer
nsresult CopyMetadataToMuxer(TrackEncoder* aTrackEncoder);
nsAutoPtr<ContainerWriter> mWriter;
nsAutoPtr<AudioTrackEncoder> mAudioEncoder;
nsAutoPtr<VideoTrackEncoder> mVideoEncoder;
+ RefPtr<MediaStreamVideoRecorderSink> mVideoSink;
TimeStamp mStartTime;
nsString mMIMEType;
int64_t mSizeOfBuffer;
int mState;
bool mShutdown;
bool mDirectConnected;
Atomic<int> mSuspended;
// Get duration from create encoder, for logging purpose
--- a/dom/media/encoder/OmxTrackEncoder.cpp
+++ b/dom/media/encoder/OmxTrackEncoder.cpp
@@ -21,30 +21,29 @@
using namespace android;
namespace mozilla {
#define ENCODER_CONFIG_FRAME_RATE 30 // fps
#define GET_ENCODED_VIDEO_FRAME_TIMEOUT 100000 // microseconds
-OmxVideoTrackEncoder::OmxVideoTrackEncoder()
- : VideoTrackEncoder()
+OmxVideoTrackEncoder::OmxVideoTrackEncoder(TrackRate aTrackRate)
+ : VideoTrackEncoder(aTrackRate)
{}
OmxVideoTrackEncoder::~OmxVideoTrackEncoder()
{}
nsresult
OmxVideoTrackEncoder::Init(int aWidth, int aHeight, int aDisplayWidth,
- int aDisplayHeight, TrackRate aTrackRate)
+ int aDisplayHeight)
{
mFrameWidth = aWidth;
mFrameHeight = aHeight;
- mTrackRate = aTrackRate;
mDisplayWidth = aDisplayWidth;
mDisplayHeight = aDisplayHeight;
mEncoder = OMXCodecWrapper::CreateAVCEncoder();
NS_ENSURE_TRUE(mEncoder, NS_ERROR_FAILURE);
nsresult rv = mEncoder->Configure(mFrameWidth, mFrameHeight,
ENCODER_CONFIG_FRAME_RATE);
--- a/dom/media/encoder/OmxTrackEncoder.h
+++ b/dom/media/encoder/OmxTrackEncoder.h
@@ -22,27 +22,26 @@ class OMXAudioEncoder;
* Bean platform.
*/
namespace mozilla {
class OmxVideoTrackEncoder: public VideoTrackEncoder
{
public:
- OmxVideoTrackEncoder();
+ explicit OmxVideoTrackEncoder(TrackRate aTrackRate);
~OmxVideoTrackEncoder();
already_AddRefed<TrackMetadataBase> GetMetadata() override;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) override;
protected:
nsresult Init(int aWidth, int aHeight,
- int aDisplayWidth, int aDisplayHeight,
- TrackRate aTrackRate) override;
+ int aDisplayWidth, int aDisplayHeight) override;
private:
nsAutoPtr<android::OMXVideoEncoder> mEncoder;
};
class OmxAudioTrackEncoder : public AudioTrackEncoder
{
public:
--- a/dom/media/encoder/TrackEncoder.cpp
+++ b/dom/media/encoder/TrackEncoder.cpp
@@ -188,67 +188,83 @@ AudioTrackEncoder::DeInterleaveTrackData
size_t
AudioTrackEncoder::SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
{
return mRawSegment.SizeOfExcludingThis(aMallocSizeOf);
}
void
+VideoTrackEncoder::Init(const VideoSegment& aSegment)
+{
+ if (mInitialized) {
+ return;
+ }
+
+ mInitCounter++;
+ TRACK_LOG(LogLevel::Debug, ("Init the video encoder %d times", mInitCounter));
+ VideoSegment::ConstChunkIterator iter(aSegment);
+ while (!iter.IsEnded()) {
+ VideoChunk chunk = *iter;
+ if (!chunk.IsNull()) {
+ gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
+ gfx::IntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
+ nsresult rv = Init(imgsize.width, imgsize.height,
+ intrinsicSize.width, intrinsicSize.height);
+
+ if (NS_FAILED(rv)) {
+ LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
+ NotifyCancel();
+ }
+ break;
+ }
+
+ iter.Next();
+ }
+}
+
+void
+VideoTrackEncoder::SetCurrentFrames(const VideoSegment& aSegment)
+{
+ if (mCanceled) {
+ return;
+ }
+
+ Init(aSegment);
+ AppendVideoSegment(aSegment);
+}
+
+void
VideoTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
TrackID aID,
StreamTime aTrackOffset,
uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia)
{
if (mCanceled) {
return;
}
+ if (!(aTrackEvents == TRACK_EVENT_CREATED ||
+ aTrackEvents == TRACK_EVENT_ENDED)) {
+ return;
+ }
+
const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);
// Check and initialize parameters for codec encoder.
- if (!mInitialized) {
- mInitCounter++;
- TRACK_LOG(LogLevel::Debug, ("Init the video encoder %d times", mInitCounter));
- VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(video));
- while (!iter.IsEnded()) {
- VideoChunk chunk = *iter;
- if (!chunk.IsNull()) {
- gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
- gfx::IntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
- nsresult rv = Init(imgsize.width, imgsize.height,
- intrinsicSize.width, intrinsicSize.height,
- aGraph->GraphRate());
- if (NS_FAILED(rv)) {
- LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
- NotifyCancel();
- }
- break;
- }
-
- iter.Next();
- }
-
- mNotInitDuration += aQueuedMedia.GetDuration();
- if (!mInitialized &&
- (mNotInitDuration / aGraph->GraphRate() > INIT_FAILED_DURATION) &&
- mInitCounter > 1) {
- LOG("[VideoTrackEncoder]: Initialize failed for 30s.");
- NotifyEndOfStream();
- return;
- }
- }
+ Init(video);
AppendVideoSegment(video);
// The stream has stopped and reached the end of track.
if (aTrackEvents == TrackEventCommand::TRACK_EVENT_ENDED) {
LOG("[VideoTrackEncoder]: Receive TRACK_EVENT_ENDED .");
NotifyEndOfStream();
+ mFirstFrame = true;
}
}
nsresult
VideoTrackEncoder::AppendVideoSegment(const VideoSegment& aSegment)
{
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
@@ -261,30 +277,45 @@ VideoTrackEncoder::AppendVideoSegment(co
mTotalFrameDuration += chunk.GetDuration();
mLastFrameDuration += chunk.GetDuration();
// Send only the unique video frames for encoding.
// Or if we got the same video chunks more than 1 seconds,
// force to send into encoder.
if ((mLastFrame != chunk.mFrame) ||
(mLastFrameDuration >= mTrackRate)) {
RefPtr<layers::Image> image = chunk.mFrame.GetImage();
+
+ // FIXME: see bug 1290777. We should remove the usage of duration here and
+ // in |GetEncodedTrack|.
+ StreamTime duration;
+ if (mFirstFrame)
+ {
+ duration = chunk.GetDuration();
+ mFirstFrame = false;
+ } else {
+ MOZ_ASSERT(chunk.mTimeStamp >= mLastFrameTimeStamp);
+ TimeDuration timeDuration = chunk.mTimeStamp - mLastFrameTimeStamp;
+ duration = SecondsToMediaTime(timeDuration.ToSeconds());
+ }
+
// Because we may get chunks with a null image (due to input blocking),
// accumulate duration and give it to the next frame that arrives.
// Canonically incorrect - the duration should go to the previous frame
// - but that would require delaying until the next frame arrives.
// Best would be to do like OMXEncoder and pass an effective timestamp
// in with each frame.
if (image) {
mRawSegment.AppendFrame(image.forget(),
- mLastFrameDuration,
+ duration,
chunk.mFrame.GetIntrinsicSize(),
PRINCIPAL_HANDLE_NONE,
chunk.mFrame.GetForceBlack());
mLastFrameDuration = 0;
}
+ mLastFrameTimeStamp = chunk.mTimeStamp;
}
mLastFrame.TakeFrom(&chunk.mFrame);
iter.Next();
}
if (mRawSegment.GetDuration() > 0) {
mReentrantMonitor.NotifyAll();
}
@@ -294,17 +325,17 @@ VideoTrackEncoder::AppendVideoSegment(co
void
VideoTrackEncoder::NotifyEndOfStream()
{
// If source video track is muted till the end of encoding, initialize the
// encoder with default frame width, frame height, and track rate.
if (!mCanceled && !mInitialized) {
Init(DEFAULT_FRAME_WIDTH, DEFAULT_FRAME_HEIGHT,
- DEFAULT_FRAME_WIDTH, DEFAULT_FRAME_HEIGHT, DEFAULT_TRACK_RATE);
+ DEFAULT_FRAME_WIDTH, DEFAULT_FRAME_HEIGHT);
}
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
mEndOfStream = true;
mReentrantMonitor.NotifyAll();
}
size_t
--- a/dom/media/encoder/TrackEncoder.h
+++ b/dom/media/encoder/TrackEncoder.h
@@ -243,26 +243,28 @@ protected:
AudioSegment mRawSegment;
uint32_t mAudioBitrate;
};
class VideoTrackEncoder : public TrackEncoder
{
public:
- VideoTrackEncoder()
+ explicit VideoTrackEncoder(TrackRate aTrackRate)
: TrackEncoder()
, mFrameWidth(0)
, mFrameHeight(0)
, mDisplayWidth(0)
, mDisplayHeight(0)
- , mTrackRate(0)
+ , mTrackRate(aTrackRate)
, mTotalFrameDuration(0)
, mLastFrameDuration(0)
, mVideoBitrate(0)
+ , mLastFrameTimeStamp(TimeStamp::Now())
+ , mFirstFrame(true)
{}
/**
* Notified by the same callback of MediaEncoder when it has received a track
* change from MediaStreamGraph. Called on the MediaStreamGraph thread.
*/
void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset,
@@ -272,26 +274,38 @@ public:
* Measure size of mRawSegment
*/
size_t SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
void SetBitrate(const uint32_t aBitrate) override
{
mVideoBitrate = aBitrate;
}
+
+ void Init(const VideoSegment& aSegment);
+
+ void SetCurrentFrames(const VideoSegment& aSegment);
+
+ StreamTime SecondsToMediaTime(double aS) const
+ {
+ NS_ASSERTION(0 <= aS && aS <= TRACK_TICKS_MAX/TRACK_RATE_MAX,
+ "Bad seconds");
+ return mTrackRate * aS;
+ }
+
protected:
/**
* Initialized the video encoder. In order to collect the value of width and
* height of source frames, this initialization is delayed until we have
* received the first valid video frame from MediaStreamGraph;
* mReentrantMonitor will be notified after it has successfully initialized,
* and this method is called on the MediaStramGraph thread.
*/
virtual nsresult Init(int aWidth, int aHeight, int aDisplayWidth,
- int aDisplayHeight, TrackRate aTrackRate) = 0;
+ int aDisplayHeight) = 0;
/**
* Appends source video frames to mRawSegment. We only append the source chunk
* if it is unique to mLastChunk. Called on the MediaStreamGraph thread.
*/
nsresult AppendVideoSegment(const VideoSegment& aSegment);
/**
@@ -340,13 +354,17 @@ protected:
StreamTime mLastFrameDuration;
/**
* A segment queue of audio track data, protected by mReentrantMonitor.
*/
VideoSegment mRawSegment;
uint32_t mVideoBitrate;
+
+private:
+ TimeStamp mLastFrameTimeStamp;
+ bool mFirstFrame;
};
} // namespace mozilla
#endif
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -23,18 +23,18 @@ LazyLogModule gVP8TrackEncoderLog("VP8Tr
// Debug logging macro with object pointer and class name.
#define DEFAULT_BITRATE_BPS 2500000
#define DEFAULT_ENCODE_FRAMERATE 30
using namespace mozilla::gfx;
using namespace mozilla::layers;
-VP8TrackEncoder::VP8TrackEncoder()
- : VideoTrackEncoder()
+VP8TrackEncoder::VP8TrackEncoder(TrackRate aTrackRate)
+ : VideoTrackEncoder(aTrackRate)
, mEncodedFrameDuration(0)
, mEncodedTimestamp(0)
, mRemainingTicks(0)
, mVPXContext(new vpx_codec_ctx_t())
, mVPXImageWrapper(new vpx_image_t())
{
MOZ_COUNT_CTOR(VP8TrackEncoder);
}
@@ -48,26 +48,24 @@ VP8TrackEncoder::~VP8TrackEncoder()
if (mVPXImageWrapper) {
vpx_img_free(mVPXImageWrapper);
}
MOZ_COUNT_DTOR(VP8TrackEncoder);
}
nsresult
VP8TrackEncoder::Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
- int32_t aDisplayHeight,TrackRate aTrackRate)
+ int32_t aDisplayHeight)
{
- if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1
- || aTrackRate <= 0) {
+ if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1) {
return NS_ERROR_FAILURE;
}
ReentrantMonitorAutoEnter mon(mReentrantMonitor);
- mTrackRate = aTrackRate;
mEncodedFrameRate = DEFAULT_ENCODE_FRAMERATE;
mEncodedFrameDuration = mTrackRate / mEncodedFrameRate;
mFrameWidth = aWidth;
mFrameHeight = aHeight;
mDisplayWidth = aDisplayWidth;
mDisplayHeight = aDisplayHeight;
// Encoder configuration structure.
--- a/dom/media/encoder/VP8TrackEncoder.h
+++ b/dom/media/encoder/VP8TrackEncoder.h
@@ -24,27 +24,26 @@ typedef struct vpx_image vpx_image_t;
class VP8TrackEncoder : public VideoTrackEncoder
{
enum EncodeOperation {
ENCODE_NORMAL_FRAME, // VP8 track encoder works normally.
ENCODE_I_FRAME, // The next frame will be encoded as I-Frame.
SKIP_FRAME, // Skip the next frame.
};
public:
- VP8TrackEncoder();
+ explicit VP8TrackEncoder(TrackRate aTrackRate);
virtual ~VP8TrackEncoder();
already_AddRefed<TrackMetadataBase> GetMetadata() final override;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) final override;
protected:
nsresult Init(int32_t aWidth, int32_t aHeight,
- int32_t aDisplayWidth, int32_t aDisplayHeight,
- TrackRate aTrackRate) final override;
+ int32_t aDisplayWidth, int32_t aDisplayHeight) final override;
private:
// Calculate the target frame's encoded duration.
StreamTime CalculateEncodedDuration(StreamTime aDurationCopied);
// Calculate the mRemainingTicks for next target frame.
StreamTime CalculateRemainingTicks(StreamTime aDurationCopied,
StreamTime aEncodedDuration);
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -170,74 +170,73 @@ private:
mozilla::gfx::IntSize mImageSize;
nsTArray<uint8_t> mSourceBuffer;
};
struct InitParam {
bool mShouldSucceed; // This parameter should cause success or fail result
int mWidth; // frame width
int mHeight; // frame height
- mozilla::TrackRate mTrackRate; // track rate. 90K is the most commond track rate.
};
class TestVP8TrackEncoder: public VP8TrackEncoder
{
public:
+ explicit TestVP8TrackEncoder(TrackRate aTrackRate = 90000)
+ : VP8TrackEncoder(aTrackRate) {}
+
::testing::AssertionResult TestInit(const InitParam &aParam)
{
- nsresult result = Init(aParam.mWidth, aParam.mHeight, aParam.mWidth, aParam.mHeight, aParam.mTrackRate);
+ nsresult result = Init(aParam.mWidth, aParam.mHeight, aParam.mWidth, aParam.mHeight);
if (((NS_FAILED(result) && aParam.mShouldSucceed)) || (NS_SUCCEEDED(result) && !aParam.mShouldSucceed))
{
return ::testing::AssertionFailure()
<< " width = " << aParam.mWidth
- << " height = " << aParam.mHeight
- << " TrackRate = " << aParam.mTrackRate << ".";
+ << " height = " << aParam.mHeight;
}
else
{
return ::testing::AssertionSuccess();
}
}
};
// Init test
TEST(VP8VideoTrackEncoder, Initialization)
{
InitParam params[] = {
// Failure cases.
- { false, 640, 480, 0 }, // Trackrate should be larger than 1.
- { false, 640, 480, -1 }, // Trackrate should be larger than 1.
- { false, 0, 0, 90000 }, // Height/ width should be larger than 1.
- { false, 0, 1, 90000 }, // Height/ width should be larger than 1.
- { false, 1, 0, 90000}, // Height/ width should be larger than 1.
+ { false, 0, 0}, // Height/ width should be larger than 1.
+ { false, 0, 1}, // Height/ width should be larger than 1.
+ { false, 1, 0}, // Height/ width should be larger than 1.
// Success cases
- { true, 640, 480, 90000}, // Standard VGA
- { true, 800, 480, 90000}, // Standard WVGA
- { true, 960, 540, 90000}, // Standard qHD
- { true, 1280, 720, 90000} // Standard HD
+ { true, 640, 480}, // Standard VGA
+ { true, 800, 480}, // Standard WVGA
+ { true, 960, 540}, // Standard qHD
+ { true, 1280, 720} // Standard HD
};
for (size_t i = 0; i < ArrayLength(params); i++)
{
TestVP8TrackEncoder encoder;
EXPECT_TRUE(encoder.TestInit(params[i]));
}
}
// Get MetaData test
TEST(VP8VideoTrackEncoder, FetchMetaData)
{
InitParam params[] = {
// Success cases
- { true, 640, 480, 90000}, // Standard VGA
- { true, 800, 480, 90000}, // Standard WVGA
- { true, 960, 540, 90000}, // Standard qHD
- { true, 1280, 720, 90000} // Standard HD
+ { true, 640, 480}, // Standard VGA
+ { true, 800, 480}, // Standard WVGA
+ { true, 960, 540}, // Standard qHD
+ { true, 1280, 720} // Standard HD
};
for (size_t i = 0; i < ArrayLength(params); i++)
{
TestVP8TrackEncoder encoder;
EXPECT_TRUE(encoder.TestInit(params[i]));
RefPtr<TrackMetadataBase> meta = encoder.GetMetadata();
@@ -249,17 +248,17 @@ TEST(VP8VideoTrackEncoder, FetchMetaData
}
}
// Encode test
TEST(VP8VideoTrackEncoder, FrameEncode)
{
// Initiate VP8 encoder
TestVP8TrackEncoder encoder;
- InitParam param = {true, 640, 480, 90000};
+ InitParam param = {true, 640, 480};
encoder.TestInit(param);
// Create YUV images as source.
nsTArray<RefPtr<Image>> images;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
generator.Generate(images);
@@ -271,29 +270,29 @@ TEST(VP8VideoTrackEncoder, FrameEncode)
RefPtr<Image> image = images[i];
segment.AppendFrame(image.forget(),
mozilla::StreamTime(90000),
generator.GetSize(),
PRINCIPAL_HANDLE_NONE);
}
// track change notification.
- encoder.NotifyQueuedTrackChanges(nullptr, 0, 0, 0, segment);
+ encoder.SetCurrentFrames(segment);
// Pull Encoded Data back from encoder.
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
}
// EOS test
TEST(VP8VideoTrackEncoder, EncodeComplete)
{
// Initiate VP8 encoder
TestVP8TrackEncoder encoder;
- InitParam param = {true, 640, 480, 90000};
+ InitParam param = {true, 640, 480};
encoder.TestInit(param);
// track end notification.
VideoSegment segment;
encoder.NotifyQueuedTrackChanges(nullptr, 0, 0, TrackEventCommand::TRACK_EVENT_ENDED, segment);
// Pull Encoded Data back from encoder. Since we have sent
// EOS to encoder, encoder.GetEncodedTrack should return
--- a/dom/media/gtest/TestWebMWriter.cpp
+++ b/dom/media/gtest/TestWebMWriter.cpp
@@ -23,21 +23,23 @@ public:
}
return false;
}
};
class WebMVP8TrackEncoder: public VP8TrackEncoder
{
public:
+ explicit WebMVP8TrackEncoder(TrackRate aTrackRate = 90000)
+ : VP8TrackEncoder(aTrackRate) {}
+
bool TestVP8Creation(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
- int32_t aDisplayHeight, TrackRate aTrackRate)
+ int32_t aDisplayHeight)
{
- if (NS_SUCCEEDED(Init(aWidth, aHeight, aDisplayWidth, aDisplayHeight,
- aTrackRate))) {
+ if (NS_SUCCEEDED(Init(aWidth, aHeight, aDisplayWidth, aDisplayHeight))) {
return true;
}
return false;
}
};
const uint64_t FIXED_DURATION = 1000000;
const uint32_t FIXED_FRAMESIZE = 500;
@@ -55,17 +57,17 @@ public:
EXPECT_TRUE(opusEncoder.TestOpusCreation(aChannels, aSampleRate));
RefPtr<TrackMetadataBase> opusMeta = opusEncoder.GetMetadata();
SetMetadata(opusMeta);
}
void SetVP8Metadata(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
int32_t aDisplayHeight,TrackRate aTrackRate) {
WebMVP8TrackEncoder vp8Encoder;
EXPECT_TRUE(vp8Encoder.TestVP8Creation(aWidth, aHeight, aDisplayWidth,
- aDisplayHeight, aTrackRate));
+ aDisplayHeight));
RefPtr<TrackMetadataBase> vp8Meta = vp8Encoder.GetMetadata();
SetMetadata(vp8Meta);
}
// When we append an I-Frame into WebM muxer, the muxer will treat previous
// data as "a cluster".
// In these test cases, we will call the function many times to enclose the
// previous cluster so that we can retrieve data by |GetContainerData|.