Bug 1437366 - Set the correct (possibly clamped) rate on the MediaStreamTrack when the MSG runs at a rate not compatible with the webrtc.org code, and fix interval calculation. r?pehrsons
MozReview-Commit-ID: HOPjfdUJBSU
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1960,43 +1960,47 @@ public:
, mTrackId(aTrack->GetInputTrackId())
, mSource(mTrack->GetInputStream()->AsSourceStream())
, mPlayedTicks(0)
, mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
, mListening(false)
, mMaybeTrackNeedsUnmute(true)
{
MOZ_RELEASE_ASSERT(mSource, "Must be used with a SourceMediaStream");
+ }
+
+ virtual ~GenericReceiveListener()
+ {
+ NS_ReleaseOnMainThreadSystemGroup(
+ "GenericReceiveListener::track_", mTrack.forget());
+ }
+
+ void AddTrackToSource(uint32_t aRate = 0)
+ {
+ MOZ_ASSERT((aRate != 0 && mTrack->AsAudioStreamTrack()) ||
+ mTrack->AsVideoStreamTrack());
if (mTrack->AsAudioStreamTrack()) {
mSource->AddAudioTrack(
- mTrackId, mSource->GraphRate(), 0, new AudioSegment());
+ mTrackId, aRate, 0, new AudioSegment());
} else if (mTrack->AsVideoStreamTrack()) {
mSource->AddTrack(mTrackId, 0, new VideoSegment());
- } else {
- MOZ_ASSERT_UNREACHABLE("Unknown track type");
}
CSFLogDebug(
LOGTAG,
"GenericReceiveListener added %s track %d (%p) to stream %p",
mTrack->AsAudioStreamTrack() ? "audio" : "video",
mTrackId,
mTrack.get(),
mSource.get());
mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
mSource->AddListener(this);
}
- virtual ~GenericReceiveListener()
- {
- NS_ReleaseOnMainThreadSystemGroup(
- "GenericReceiveListener::track_", mTrack.forget());
- }
-
void AddSelf()
{
if (!mListening) {
mListening = true;
mSource->SetPullEnabled(true);
mMaybeTrackNeedsUnmute = true;
}
}
@@ -2110,16 +2114,17 @@ public:
->IsSamplingFreqSupported(mSource->GraphRate())
? mSource->GraphRate()
: WEBRTC_MAX_SAMPLE_RATE)
, mTaskQueue(
new AutoTaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
"AudioPipelineListener"))
, mLastLog(0)
{
+ AddTrackToSource(mRate);
}
// Implement MediaStreamListener
void NotifyPull(MediaStreamGraph* aGraph,
StreamTime aDesiredTime) override
{
NotifyPullImpl(aDesiredTime);
}
@@ -2141,20 +2146,23 @@ private:
{
NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
mConduit.forget());
}
void NotifyPullImpl(StreamTime aDesiredTime)
{
uint32_t samplesPer10ms = mRate / 100;
- // Determine how many frames we need.
- // As we get frames from conduit_ at the same rate as the graph's rate,
- // the number of frames needed straightfully determined.
- TrackTicks framesNeeded = aDesiredTime - mPlayedTicks;
+
+ // mSource's rate is not necessarily the same as the graph rate, since there
+ // are sample-rate constraints on the inbound audio: only 16, 32, 44.1 and
+ // 48kHz are supported. The audio frames we get here are going to be
+ // resampled when inserted into the graph.
+ TrackTicks desired = mSource->TimeToTicksRoundUp(mRate, aDesiredTime);
+ TrackTicks framesNeeded = desired - mPlayedTicks;
while (framesNeeded >= 0) {
const int scratchBufferLength =
AUDIO_SAMPLE_BUFFER_MAX_BYTES / sizeof(int16_t);
int16_t scratchBuffer[scratchBufferLength];
int samplesLength = scratchBufferLength;
@@ -2308,16 +2316,17 @@ class MediaPipelineReceiveVideo::Pipelin
{
public:
explicit PipelineListener(dom::MediaStreamTrack* aTrack)
: GenericReceiveListener(aTrack)
, mImageContainer(
LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS))
, mMutex("Video PipelineListener")
{
+ AddTrackToSource();
}
// Implement MediaStreamListener
void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
{
MutexAutoLock lock(mMutex);
RefPtr<Image> image = mImage;