Bug 1201363 - Rename StreamBuffer to StreamTracks. r?jesup,r?pehrsons draft
authorctai <ctai@mozilla.com>
Tue, 26 Jan 2016 10:49:01 +0800
changeset 344705 15bb8a82689ab61b32496bcec00194ef09d7b3a8
parent 323526 6764bc656c1d146962d53710d734c2ac87c2306f
child 344706 6d6c11003fa5d5ddfc0daf2d411491863d1a6f81
push id13910
push userbmo:ctai@mozilla.com
push dateFri, 25 Mar 2016 13:21:09 +0000
reviewersjesup, pehrsons
bugs1201363
milestone46.0a1
Bug 1201363 - Rename StreamBuffer to StreamTracks. r?jesup,r?pehrsons Rename StreamBuffer to StreamTracks. We still need a place to keep the track information in every MediaStream, even when StreamBuffer::Track::mSegment is empty. MozReview-Commit-ID: JTRW8L4hKjq
dom/media/StreamTracks.cpp
dom/media/StreamTracks.h
dom/media/AudioCaptureStream.cpp
dom/media/AudioCaptureStream.h
dom/media/CanvasCaptureMediaStream.h
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/MediaInfo.h
dom/media/MediaSegment.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/MediaStreamGraphImpl.h
dom/media/MediaStreamTrack.h
dom/media/StreamBuffer.cpp
dom/media/StreamBuffer.h
dom/media/StreamTracks.cpp
dom/media/StreamTracks.h
dom/media/TrackUnionStream.cpp
dom/media/TrackUnionStream.h
dom/media/encoder/TrackEncoder.h
dom/media/moz.build
dom/media/webaudio/AudioNodeExternalInputStream.cpp
dom/media/webaudio/AudioNodeStream.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineWebRTC.h
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
media/webrtc/signaling/test/FakeMediaStreams.h
media/webrtc/signaling/test/FakePCObserver.h
rename from dom/media/StreamBuffer.cpp
rename to dom/media/StreamTracks.cpp
rename from dom/media/StreamBuffer.h
rename to dom/media/StreamTracks.h
--- a/dom/media/AudioCaptureStream.cpp
+++ b/dom/media/AudioCaptureStream.cpp
@@ -43,17 +43,17 @@ AudioCaptureStream::~AudioCaptureStream(
   mMixer.RemoveCallback(this);
 }
 
 void
 AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
                                  uint32_t aFlags)
 {
   uint32_t inputCount = mInputs.Length();
-  StreamBuffer::Track* track = EnsureTrack(mTrackId);
+  StreamTracks::Track* track = EnsureTrack(mTrackId);
   // Notify the DOM everything is in order.
   if (!mTrackCreated) {
     for (uint32_t i = 0; i < mListeners.Length(); i++) {
       MediaStreamListener* l = mListeners[i];
       AudioSegment tmp;
       l->NotifyQueuedTrackChanges(
         Graph(), mTrackId, 0, MediaStreamListener::TRACK_EVENT_CREATED, tmp);
       l->NotifyFinishedTrackCreation(Graph());
@@ -69,17 +69,17 @@ AudioCaptureStream::ProcessInput(GraphTi
     track->Get<AudioSegment>()->AppendNullData(aTo - aFrom);
   } else {
     // We mix down all the tracks of all inputs, to a stereo track. Everything
     // is {up,down}-mixed to stereo.
     mMixer.StartMixing();
     AudioSegment output;
     for (uint32_t i = 0; i < inputCount; i++) {
       MediaStream* s = mInputs[i]->GetSource();
-      StreamBuffer::TrackIter tracks(s->GetStreamBuffer(), MediaSegment::AUDIO);
+      StreamTracks::TrackIter tracks(s->GetStreamTracks(), MediaSegment::AUDIO);
       while (!tracks.IsEnded()) {
         AudioSegment* inputSegment = tracks->Get<AudioSegment>();
         StreamTime inputStart = s->GraphTimeToStreamTimeWithBlocking(aFrom);
         StreamTime inputEnd = s->GraphTimeToStreamTimeWithBlocking(aTo);
         AudioSegment toMix;
         toMix.AppendSlice(*inputSegment, inputStart, inputEnd);
         // Care for streams blocked in the [aTo, aFrom] range.
         if (inputEnd - inputStart < aTo - aFrom) {
@@ -89,17 +89,17 @@ AudioCaptureStream::ProcessInput(GraphTi
         tracks.Next();
       }
     }
     // This calls MixerCallback below
     mMixer.FinishMixing();
   }
 
   // Regardless of the status of the input tracks, we go foward.
-  mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking((aTo)));
+  mTracks.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking((aTo)));
 }
 
 void
 AudioCaptureStream::MixerCallback(AudioDataValue* aMixedBuffer,
                                   AudioSampleFormat aFormat, uint32_t aChannels,
                                   uint32_t aFrames, uint32_t aSampleRate)
 {
   nsAutoTArray<nsTArray<AudioDataValue>, MONO> output;
--- a/dom/media/AudioCaptureStream.h
+++ b/dom/media/AudioCaptureStream.h
@@ -3,17 +3,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_AUDIOCAPTURESTREAM_H_
 #define MOZILLA_AUDIOCAPTURESTREAM_H_
 
 #include "MediaStreamGraph.h"
 #include "AudioMixer.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include <algorithm>
 
 namespace mozilla
 {
 
 class DOMMediaStream;
 
 /**
--- a/dom/media/CanvasCaptureMediaStream.h
+++ b/dom/media/CanvasCaptureMediaStream.h
@@ -3,17 +3,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_dom_CanvasCaptureMediaStream_h_
 #define mozilla_dom_CanvasCaptureMediaStream_h_
 
 #include "DOMMediaStream.h"
 #include "mozilla/dom/HTMLCanvasElement.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 
 namespace mozilla {
 class DOMMediaStream;
 class MediaStreamListener;
 class SourceMediaStream;
 
 namespace layers {
 class Image;
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -1123,17 +1123,17 @@ DOMHwMediaStream::SetImageSize(uint32_t 
   OverlayImage::Data imgData;
 
   imgData.mOverlayId = mOverlayImage->GetOverlayId();
   imgData.mSize = IntSize(width, height);
   mOverlayImage->SetData(imgData);
 #endif
 
   SourceMediaStream* srcStream = GetInputStream()->AsSourceStream();
-  StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
+  StreamTracks::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
 
   if (!track || !track->GetSegment()) {
     return;
   }
 
 #ifdef MOZ_WIDGET_GONK
   // Clear the old segment.
   // Changing the existing content of segment is a Very BAD thing, and this way will
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -5,17 +5,17 @@
 
 #ifndef NSDOMMEDIASTREAM_H_
 #define NSDOMMEDIASTREAM_H_
 
 #include "ImageContainer.h"
 
 #include "nsCycleCollectionParticipant.h"
 #include "nsWrapperCache.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "nsIDOMWindow.h"
 #include "nsIPrincipal.h"
 #include "mozilla/PeerIdentity.h"
 #include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/CORSMode.h"
 
 // GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
 // GetTickCount() and conflicts with NS_DECL_NSIDOMMEDIASTREAM, containing
--- a/dom/media/MediaInfo.h
+++ b/dom/media/MediaInfo.h
@@ -9,17 +9,17 @@
 #include "mozilla/UniquePtr.h"
 #include "nsRect.h"
 #include "mozilla/RefPtr.h"
 #include "nsSize.h"
 #include "nsString.h"
 #include "nsTArray.h"
 #include "ImageTypes.h"
 #include "MediaData.h"
-#include "StreamBuffer.h" // for TrackID
+#include "StreamTracks.h" // for TrackID
 #include "TimeUnits.h"
 
 namespace mozilla {
 
 class AudioInfo;
 class VideoInfo;
 class TextInfo;
 
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -38,17 +38,17 @@ const int64_t TRACK_TICKS_MAX = INT64_MA
 /**
  * We represent media times in 64-bit audio frame counts or ticks.
  * All tracks in a MediaStreamGraph have the same rate.
  */
 typedef int64_t MediaTime;
 const int64_t MEDIA_TIME_MAX = TRACK_TICKS_MAX;
 
 /**
- * Media time relative to the start of a StreamBuffer.
+ * Media time relative to the start of a StreamTracks.
  */
 typedef MediaTime StreamTime;
 const StreamTime STREAM_TIME_MAX = MEDIA_TIME_MAX;
 
 /**
  * Media time relative to the start of the graph timeline.
  */
 typedef MediaTime GraphTime;
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -72,25 +72,25 @@ MediaStreamGraphImpl::~MediaStreamGraphI
 
 void
 MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
 {
   if (aStream->mFinished)
     return;
   STREAM_LOG(LogLevel::Debug, ("MediaStream %p will finish", aStream));
   aStream->mFinished = true;
-  aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
+  aStream->mTracks.AdvanceKnownTracksTime(STREAM_TIME_MAX);
 
   SetStreamOrderDirty();
 }
 
 void
 MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
 {
-  aStream->mBufferStartTime = mProcessedTime;
+  aStream->mTracksStartTime = mProcessedTime;
   if (aStream->IsSuspended()) {
     mSuspendedStreams.AppendElement(aStream);
     STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
   } else {
     mStreams.AppendElement(aStream);
     STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph", aStream));
   }
 
@@ -136,24 +136,24 @@ MediaStreamGraphImpl::ExtractPendingInpu
     MutexAutoLock lock(aStream->mMutex);
     if (aStream->mPullEnabled && !aStream->mFinished &&
         !aStream->mListeners.IsEmpty()) {
       // Compute how much stream time we'll need assuming we don't block
       // the stream at all.
       StreamTime t = aStream->GraphTimeToStreamTime(aDesiredUpToTime);
       STREAM_LOG(LogLevel::Verbose, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                   MediaTimeToSeconds(t),
-                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
-      if (t > aStream->mBuffer.GetEnd()) {
+                                  MediaTimeToSeconds(aStream->mTracks.GetEnd())));
+      if (t > aStream->mTracks.GetEnd()) {
         *aEnsureNextIteration = true;
 #ifdef DEBUG
         if (aStream->mListeners.Length() == 0) {
           STREAM_LOG(LogLevel::Error, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                                     aStream, MediaTimeToSeconds(t),
-                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
+                                    MediaTimeToSeconds(aStream->mTracks.GetEnd())));
           aStream->DumpTrackInfo();
         }
 #endif
         for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
           MediaStreamListener* l = aStream->mListeners[j];
           {
             MutexAutoUnlock unlock(aStream->mMutex);
             l->NotifyPull(this, t);
@@ -163,72 +163,72 @@ MediaStreamGraphImpl::ExtractPendingInpu
     }
     finished = aStream->mUpdateFinished;
     bool notifiedTrackCreated = false;
     for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
       SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
       aStream->ApplyTrackDisabling(data->mID, data->mData);
       for (MediaStreamListener* l : aStream->mListeners) {
         StreamTime offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
-            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
+            ? data->mStart : aStream->mTracks.FindTrack(data->mID)->GetSegment()->GetDuration();
         l->NotifyQueuedTrackChanges(this, data->mID,
                                     offset, data->mCommands, *data->mData);
       }
       if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
         MediaSegment* segment = data->mData.forget();
         STREAM_LOG(LogLevel::Debug, ("SourceMediaStream %p creating track %d, start %lld, initial end %lld",
                                   aStream, data->mID, int64_t(data->mStart),
                                   int64_t(segment->GetDuration())));
 
         data->mEndOfFlushedData += segment->GetDuration();
-        aStream->mBuffer.AddTrack(data->mID, data->mStart, segment);
+        aStream->mTracks.AddTrack(data->mID, data->mStart, segment);
         // The track has taken ownership of data->mData, so let's replace
         // data->mData with an empty clone.
         data->mData = segment->CreateEmptyClone();
         data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
         notifiedTrackCreated = true;
       } else if (data->mData->GetDuration() > 0) {
-        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
+        MediaSegment* dest = aStream->mTracks.FindTrack(data->mID)->GetSegment();
         STREAM_LOG(LogLevel::Verbose, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
                                     aStream, data->mID,
                                     int64_t(dest->GetDuration()),
                                     int64_t(dest->GetDuration() + data->mData->GetDuration())));
         data->mEndOfFlushedData += data->mData->GetDuration();
         dest->AppendFrom(data->mData);
       }
       if (data->mCommands & SourceMediaStream::TRACK_END) {
-        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
+        aStream->mTracks.FindTrack(data->mID)->SetEnded();
         aStream->mUpdateTracks.RemoveElementAt(i);
       }
     }
     if (notifiedTrackCreated) {
       for (MediaStreamListener* l : aStream->mListeners) {
         l->NotifyFinishedTrackCreation(this);
       }
     }
     if (!aStream->mFinished) {
-      aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
+      aStream->mTracks.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
     }
   }
-  if (aStream->mBuffer.GetEnd() > 0) {
+  if (aStream->mTracks.GetEnd() > 0) {
     aStream->mHasCurrentData = true;
   }
   if (finished) {
     FinishStream(aStream);
   }
 }
 
 StreamTime
 MediaStreamGraphImpl::GraphTimeToStreamTimeWithBlocking(MediaStream* aStream,
                                                         GraphTime aTime)
 {
   MOZ_ASSERT(aTime <= mStateComputedTime,
              "Don't ask about times where we haven't made blocking decisions yet");
   return std::max<StreamTime>(0,
-      std::min(aTime, aStream->mStartBlocking) - aStream->mBufferStartTime);
+      std::min(aTime, aStream->mStartBlocking) - aStream->mTracksStartTime);
 }
 
 GraphTime
 MediaStreamGraphImpl::IterationEnd() const
 {
   return CurrentDriver()->IterationEnd();
 }
 
@@ -241,17 +241,17 @@ MediaStreamGraphImpl::UpdateCurrentTimeF
 
     // Calculate blocked time and fire Blocked/Unblocked events
     GraphTime blockedTime = mStateComputedTime - stream->mStartBlocking;
     NS_ASSERTION(blockedTime >= 0, "Error in blocking time");
     stream->AdvanceTimeVaryingValuesToCurrentTime(mStateComputedTime,
                                                   blockedTime);
     STREAM_LOG(LogLevel::Verbose,
                ("MediaStream %p bufferStartTime=%f blockedTime=%f", stream,
-                MediaTimeToSeconds(stream->mBufferStartTime),
+                MediaTimeToSeconds(stream->mTracksStartTime),
                 MediaTimeToSeconds(blockedTime)));
     stream->mStartBlocking = mStateComputedTime;
 
     if (isAnyUnblocked && stream->mNotifiedBlocked) {
       for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
         MediaStreamListener* l = stream->mListeners[j];
         l->NotifyBlockingChanged(this, MediaStreamListener::UNBLOCKED);
       }
@@ -273,17 +273,17 @@ MediaStreamGraphImpl::UpdateCurrentTimeF
         l->NotifyOutput(this, mProcessedTime);
       }
     }
 
     // The stream is fully finished when all of its track data has been played
     // out.
     if (stream->mFinished && !stream->mNotifiedFinished &&
         mProcessedTime >=
-          stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
+          stream->StreamTimeToGraphTime(stream->GetStreamTracks().GetAllTracksEnd())) {
       stream->mNotifiedFinished = true;
       SetStreamOrderDirty();
       for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
         MediaStreamListener* l = stream->mListeners[j];
         l->NotifyEvent(this, MediaStreamListener::EVENT_FINISHED);
       }
     }
   }
@@ -297,17 +297,17 @@ MediaStreamGraphImpl::WillUnderrun(Media
   // underrun currently, since we'll always be able to produce data for them
   // unless they block on some other stream.
   if (aStream->mFinished || aStream->AsProcessedStream()) {
     return aEndBlockingDecisions;
   }
   // This stream isn't finished or suspended. We don't need to call
   // StreamTimeToGraphTime since an underrun is the only thing that can block
   // it.
-  GraphTime bufferEnd = aStream->GetBufferEnd() + aStream->mBufferStartTime;
+  GraphTime bufferEnd = aStream->GetBufferEnd() + aStream->mTracksStartTime;
 #ifdef DEBUG
   if (bufferEnd < mProcessedTime) {
     STREAM_LOG(LogLevel::Error, ("MediaStream %p underrun, "
                               "bufferEnd %f < mProcessedTime %f (%lld < %lld), Streamtime %lld",
                               aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mProcessedTime),
                               bufferEnd, mProcessedTime, aStream->GetBufferEnd()));
     aStream->DumpTrackInfo();
     NS_ASSERTION(bufferEnd >= mProcessedTime, "Buffer underran");
@@ -337,17 +337,17 @@ MediaStreamGraphImpl::UpdateStreamOrder(
         stream->AsSourceStream()->NeedsMixing()) {
       shouldAEC = true;
     }
 #endif
     // If this is a AudioNodeStream, force a AudioCallbackDriver.
     if (stream->AsAudioNodeStream()) {
       audioTrackPresent = true;
     } else {
-      for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer(), MediaSegment::AUDIO);
+      for (StreamTracks::TrackIter tracks(stream->GetStreamTracks(), MediaSegment::AUDIO);
            !tracks.IsEnded(); tracks.Next()) {
         audioTrackPresent = true;
       }
     }
   }
 
   if (!audioTrackPresent && mRealtime &&
       CurrentDriver()->AsAudioCallbackDriver()) {
@@ -592,29 +592,29 @@ MediaStreamGraphImpl::CreateOrDestroyAud
 {
   MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");
 
   if (aStream->mAudioOutputs.IsEmpty()) {
     aStream->mAudioOutputStreams.Clear();
     return;
   }
 
-  if (!aStream->GetStreamBuffer().GetAndResetTracksDirty() &&
+  if (!aStream->GetStreamTracks().GetAndResetTracksDirty() &&
       !aStream->mAudioOutputStreams.IsEmpty()) {
     return;
   }
 
   STREAM_LOG(LogLevel::Debug, ("Updating AudioOutputStreams for MediaStream %p", aStream));
 
   nsAutoTArray<bool,2> audioOutputStreamsFound;
   for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
     audioOutputStreamsFound.AppendElement(false);
   }
 
-  for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
+  for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(), MediaSegment::AUDIO);
        !tracks.IsEnded(); tracks.Next()) {
     uint32_t i;
     for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
       if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
         break;
       }
     }
     if (i < audioOutputStreamsFound.Length()) {
@@ -664,23 +664,23 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
   }
 
   StreamTime ticksWritten = 0;
 
   for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
     ticksWritten = 0;
 
     MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
-    StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
+    StreamTracks::Track* track = aStream->mTracks.FindTrack(audioOutput.mTrackID);
     AudioSegment* audio = track->Get<AudioSegment>();
     AudioSegment output;
 
     StreamTime offset = aStream->GraphTimeToStreamTime(mProcessedTime);
 
-    // We don't update aStream->mBufferStartTime here to account for time spent
+    // We don't update aStream->mTracksStartTime here to account for time spent
     // blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
     // the blocked period has completed. But we do need to make sure we play
     // from the right offsets in the stream buffer, even if we've already
     // written silence for some amount of blocked time after the current time.
     GraphTime t = mProcessedTime;
     while (t < mStateComputedTime) {
       bool blocked = t >= aStream->mStartBlocking;
       GraphTime end = blocked ? mStateComputedTime : aStream->mStartBlocking;
@@ -788,29 +788,29 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
     return;
 
   TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
 
   // Collect any new frames produced in this iteration.
   nsAutoTArray<ImageContainer::NonOwningImage,4> newImages;
   RefPtr<Image> blackImage;
 
-  MOZ_ASSERT(mProcessedTime >= aStream->mBufferStartTime, "frame position before buffer?");
+  MOZ_ASSERT(mProcessedTime >= aStream->mTracksStartTime, "frame position before buffer?");
   // We only look at the non-blocking interval
   StreamTime frameBufferTime = aStream->GraphTimeToStreamTime(mProcessedTime);
   StreamTime bufferEndTime = aStream->GraphTimeToStreamTime(aStream->mStartBlocking);
   StreamTime start;
   const VideoChunk* chunk;
   for ( ;
        frameBufferTime < bufferEndTime;
        frameBufferTime = start + chunk->GetDuration()) {
     // Pick the last track that has a video chunk for the time, and
     // schedule its frame.
     chunk = nullptr;
-    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(),
+    for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(),
                                         MediaSegment::VIDEO);
          !tracks.IsEnded();
          tracks.Next()) {
       VideoSegment* segment = tracks->Get<VideoSegment>();
       StreamTime thisStart;
       const VideoChunk* thisChunk =
         segment->FindChunkContaining(frameBufferTime, &thisStart);
       if (thisChunk && thisChunk->mFrame.GetImage()) {
@@ -1069,18 +1069,18 @@ MediaStreamGraphImpl::UpdateGraph(GraphT
     if (SourceMediaStream* is = stream->AsSourceStream()) {
       ExtractPendingInput(is, aEndBlockingDecisions, &ensureNextIteration);
     }
 
     if (stream->mFinished) {
       // The stream's not suspended, and since it's finished, underruns won't
       // stop it playing out. So there's no blocking other than what we impose
       // here.
-      GraphTime endTime = stream->GetStreamBuffer().GetAllTracksEnd() +
-          stream->mBufferStartTime;
+      GraphTime endTime = stream->GetStreamTracks().GetAllTracksEnd() +
+          stream->mTracksStartTime;
       if (endTime <= mStateComputedTime) {
         STREAM_LOG(LogLevel::Verbose, ("MediaStream %p is blocked due to being finished", stream));
         stream->mStartBlocking = mStateComputedTime;
       } else {
         STREAM_LOG(LogLevel::Verbose, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
             stream, MediaTimeToSeconds(stream->GetBufferEnd()),
             MediaTimeToSeconds(endTime)));
         // Data can't be added to a finished stream, so underruns are irrelevant.
@@ -1140,17 +1140,17 @@ MediaStreamGraphImpl::Process()
 #endif
           // Since an AudioNodeStream is present, go ahead and
           // produce audio block by block for all the rest of the streams.
           ProduceDataForStreamsBlockByBlock(i, n->SampleRate());
           doneAllProducing = true;
         } else {
           ps->ProcessInput(mProcessedTime, mStateComputedTime,
                            ProcessedMediaStream::ALLOW_FINISH);
-          NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
+          NS_WARN_IF_FALSE(stream->mTracks.GetEnd() >=
                            GraphTimeToStreamTimeWithBlocking(stream, mStateComputedTime),
                            "Stream did not produce enough data");
         }
       }
     }
     NotifyHasCurrentData(stream);
     // Only playback audio and video in real-time mode
     if (mRealtime) {
@@ -1630,17 +1630,17 @@ MediaStreamGraphImpl::AppendMessage(Cont
     return;
   }
 
   mCurrentTaskMessageQueue.AppendElement(aMessage);
   EnsureRunInStableState();
 }
 
 MediaStream::MediaStream(DOMMediaStream* aWrapper)
-  : mBufferStartTime(0)
+  : mTracksStartTime(0)
   , mStartBlocking(GRAPH_TIME_MAX)
   , mSuspendedCount(0)
   , mFinished(false)
   , mNotifiedFinished(false)
   , mNotifiedBlocked(false)
   , mHasCurrentData(false)
   , mNotifiedHasCurrentData(false)
   , mWrapper(aWrapper)
@@ -1669,17 +1669,17 @@ MediaStream::SizeOfExcludingThis(MallocS
   // - mConsumers - elements
   // Future:
   // - mWrapper
   // - mVideoOutputs - elements
   // - mLastPlayedVideoFrame
   // - mListeners - elements
   // - mAudioOutputStream - elements
 
-  amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
+  amount += mTracks.SizeOfExcludingThis(aMallocSizeOf);
   amount += mAudioOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mVideoOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mMainThreadListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mDisabledTrackIDs.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mConsumers.ShallowSizeOfExcludingThis(aMallocSizeOf);
 
   return amount;
@@ -1704,72 +1704,72 @@ MediaStream::Graph()
 }
 
 void
 MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
 {
   MOZ_ASSERT(!mGraph, "Should only be called once");
   mGraph = aGraph;
   mAudioChannelType = aGraph->AudioChannel();
-  mBuffer.InitGraphRate(aGraph->GraphRate());
+  mTracks.InitGraphRate(aGraph->GraphRate());
 }
 
 void
 MediaStream::SetGraphImpl(MediaStreamGraph* aGraph)
 {
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
   SetGraphImpl(graph);
 }
 
 StreamTime
 MediaStream::GraphTimeToStreamTime(GraphTime aTime)
 {
   NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
                aTime <= mStartBlocking,
                "Incorrectly ignoring blocking!");
-  return aTime - mBufferStartTime;
+  return aTime - mTracksStartTime;
 }
 
 GraphTime
 MediaStream::StreamTimeToGraphTime(StreamTime aTime)
 {
   NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
-               aTime + mBufferStartTime <= mStartBlocking,
+               aTime + mTracksStartTime <= mStartBlocking,
                "Incorrectly ignoring blocking!");
-  return aTime + mBufferStartTime;
+  return aTime + mTracksStartTime;
 }
 
 StreamTime
 MediaStream::GraphTimeToStreamTimeWithBlocking(GraphTime aTime)
 {
   return GraphImpl()->GraphTimeToStreamTimeWithBlocking(this, aTime);
 }
 
 void
 MediaStream::FinishOnGraphThread()
 {
   GraphImpl()->FinishStream(this);
 }
 
-StreamBuffer::Track*
+StreamTracks::Track*
 MediaStream::EnsureTrack(TrackID aTrackId)
 {
-  StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
+  StreamTracks::Track* track = mTracks.FindTrack(aTrackId);
   if (!track) {
     nsAutoPtr<MediaSegment> segment(new AudioSegment());
     for (uint32_t j = 0; j < mListeners.Length(); ++j) {
       MediaStreamListener* l = mListeners[j];
       l->NotifyQueuedTrackChanges(Graph(), aTrackId, 0,
                                   MediaStreamListener::TRACK_EVENT_CREATED,
                                   *segment);
       // TODO If we ever need to ensure several tracks at once, we will have to
       // change this.
       l->NotifyFinishedTrackCreation(Graph());
     }
-    track = &mBuffer.AddTrack(aTrackId, 0, segment.forget());
+    track = &mTracks.AddTrack(aTrackId, 0, segment.forget());
   }
   return track;
 }
 
 void
 MediaStream::RemoveAllListenersImpl()
 {
   for (int32_t i = mListeners.Length() - 1; i >= 0; --i) {
@@ -2212,20 +2212,20 @@ SourceMediaStream::FinishAddTracks()
 {
   MutexAutoLock lock(mMutex);
   mUpdateTracks.AppendElements(Move(mPendingTracks));
   if (GraphImpl()) {
     GraphImpl()->EnsureNextIteration();
   }
 }
 
-StreamBuffer::Track*
+StreamTracks::Track*
 SourceMediaStream::FindTrack(TrackID aID)
 {
-  return mBuffer.FindTrack(aID);
+  return mTracks.FindTrack(aID);
 }
 
 void
 SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
 {
   if (aSegment->GetType() != MediaSegment::AUDIO ||
       aTrackData->mInputRate == GraphImpl()->GraphRate()) {
     return;
@@ -3085,17 +3085,17 @@ MediaStreamGraphImpl::ApplyAudioContextO
   // here as well so we don't have to store the Promise(s) on the Graph.
   if (aOperation != AudioContextOperation::Resume) {
     bool audioTrackPresent = false;
     for (uint32_t i = 0; i < mStreams.Length(); ++i) {
       MediaStream* stream = mStreams[i];
       if (stream->AsAudioNodeStream()) {
         audioTrackPresent = true;
       }
-      for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer(), MediaSegment::AUDIO);
+      for (StreamTracks::TrackIter tracks(stream->GetStreamTracks(), MediaSegment::AUDIO);
           !tracks.IsEnded(); tracks.Next()) {
         audioTrackPresent = true;
       }
     }
     if (!audioTrackPresent && CurrentDriver()->AsAudioCallbackDriver()) {
       CurrentDriver()->AsAudioCallbackDriver()->
         EnqueueStreamAndPromiseForOperation(aDestinationStream, aPromise,
                                             aOperation);
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -11,17 +11,17 @@
 #include "mozilla/TaskQueue.h"
 
 #include "mozilla/dom/AudioChannelBinding.h"
 
 #include "AudioSegment.h"
 #include "AudioStream.h"
 #include "nsTArray.h"
 #include "nsIRunnable.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "VideoFrameContainer.h"
 #include "VideoSegment.h"
 #include "MainThreadUtils.h"
 #include "nsAutoRef.h"
 #include <speex/speex_resampler.h>
 #include "DOMMediaStream.h"
 
 class nsIRunnable;
@@ -271,17 +271,17 @@ class CameraPreviewMediaStream;
  * to have buffered data but still be blocked.
  *
  * Any stream can have its audio and video playing when requested. The media
  * stream graph plays audio by constructing audio output streams as necessary.
  * Video is played by setting video frames into an VideoFrameContainer at the right
  * time. To ensure video plays in sync with audio, make sure that the same
  * stream is playing both the audio and video.
  *
- * The data in a stream is managed by StreamBuffer. It consists of a set of
+ * The data in a stream is managed by StreamTracks. It consists of a set of
  * tracks of various types that can start and end over time.
  *
  * Streams are explicitly managed. The client creates them via
  * MediaStreamGraph::CreateInput/ProcessedMediaStream, and releases them by calling
  * Destroy() when no longer needed (actual destruction will be deferred).
  * The actual object is owned by the MediaStreamGraph. The basic idea is that
  * main thread objects will keep Streams alive as long as necessary (using the
  * cycle collector to clean up whenever needed).
@@ -335,17 +335,17 @@ public:
    * Sets the graph that owns this stream.  Should only be called once.
    */
   void SetGraphImpl(MediaStreamGraphImpl* aGraph);
   void SetGraphImpl(MediaStreamGraph* aGraph);
 
   /**
    * Returns sample rate of the graph.
    */
-  TrackRate GraphRate() { return mBuffer.GraphRate(); }
+  TrackRate GraphRate() { return mTracks.GraphRate(); }
 
   // Control API.
   // Since a stream can be played multiple ways, we need to combine independent
   // volume settings. The aKey parameter is used to keep volume settings
   // separate. Since the stream is always playing the same contents, only
   // a single audio output stream is used; the volumes are combined.
   // Currently only the first enabled audio track is played.
   // XXX change this so all enabled audio tracks are mixed and played.
@@ -430,19 +430,19 @@ public:
 
   // These Impl methods perform the core functionality of the control methods
   // above, on the media graph thread.
   /**
    * Stop all stream activity and disconnect it from all inputs and outputs.
    * This must be idempotent.
    */
   virtual void DestroyImpl();
-  StreamTime GetBufferEnd() { return mBuffer.GetEnd(); }
+  StreamTime GetBufferEnd() { return mTracks.GetEnd(); }
 #ifdef DEBUG
-  void DumpTrackInfo() { return mBuffer.DumpTrackInfo(); }
+  void DumpTrackInfo() { return mTracks.DumpTrackInfo(); }
 #endif
   void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
   void AddAudioOutputImpl(void* aKey);
   // Returns true if this stream has an audio output.
   bool HasAudioOutput()
   {
     return !mAudioOutputs.IsEmpty();
   }
@@ -461,46 +461,46 @@ public:
   void RemoveConsumer(MediaInputPort* aPort)
   {
     mConsumers.RemoveElement(aPort);
   }
   uint32_t ConsumerCount()
   {
     return mConsumers.Length();
   }
-  StreamBuffer& GetStreamBuffer() { return mBuffer; }
-  GraphTime GetStreamBufferStartTime() { return mBufferStartTime; }
+  StreamTracks& GetStreamTracks() { return mTracks; }
+  GraphTime GetStreamTracksStartTime() { return mTracksStartTime; }
 
   double StreamTimeToSeconds(StreamTime aTime)
   {
     NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
-    return static_cast<double>(aTime)/mBuffer.GraphRate();
+    return static_cast<double>(aTime)/mTracks.GraphRate();
   }
   int64_t StreamTimeToMicroseconds(StreamTime aTime)
   {
     NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
-    return (aTime*1000000)/mBuffer.GraphRate();
+    return (aTime*1000000)/mTracks.GraphRate();
   }
   StreamTime SecondsToNearestStreamTime(double aSeconds)
   {
     NS_ASSERTION(0 <= aSeconds && aSeconds <= TRACK_TICKS_MAX/TRACK_RATE_MAX,
                  "Bad seconds");
-    return mBuffer.GraphRate() * aSeconds + 0.5;
+    return mTracks.GraphRate() * aSeconds + 0.5;
   }
   StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds) {
-    return (aMicroseconds*mBuffer.GraphRate())/1000000;
+    return (aMicroseconds*mTracks.GraphRate())/1000000;
   }
 
   TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
   {
-    return RateConvertTicksRoundUp(aRate, mBuffer.GraphRate(), aTime);
+    return RateConvertTicksRoundUp(aRate, mTracks.GraphRate(), aTime);
   }
   StreamTime TicksToTimeRoundDown(TrackRate aRate, TrackTicks aTicks)
   {
-    return RateConvertTicksRoundDown(mBuffer.GraphRate(), aRate, aTicks);
+    return RateConvertTicksRoundDown(mTracks.GraphRate(), aRate, aTicks);
   }
   /**
    * Convert graph time to stream time. aTime must be <= mStateComputedTime
    * to ensure we know exactly how much time this stream will be blocked during
    * the interval.
    */
   StreamTime GraphTimeToStreamTimeWithBlocking(GraphTime aTime);
   /**
@@ -518,17 +518,17 @@ public:
    */
   GraphTime StreamTimeToGraphTime(StreamTime aTime);
 
   bool IsFinishedOnGraphThread() { return mFinished; }
   void FinishOnGraphThread();
 
   bool HasCurrentData() { return mHasCurrentData; }
 
-  StreamBuffer::Track* EnsureTrack(TrackID aTrack);
+  StreamTracks::Track* EnsureTrack(TrackID aTrack);
 
   virtual void ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment = nullptr);
 
   DOMMediaStream* GetWrapper()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only use DOMMediaStream on main thread");
     return mWrapper;
   }
@@ -551,18 +551,18 @@ public:
   {
     NS_ASSERTION(mSuspendedCount > 0, "Suspend count underrun");
     --mSuspendedCount;
   }
 
 protected:
   void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
   {
-    mBufferStartTime += aBlockedTime;
-    mBuffer.ForgetUpTo(aCurrentTime - mBufferStartTime);
+    mTracksStartTime += aBlockedTime;
+    mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
   }
 
   void NotifyMainThreadListeners()
   {
     NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
 
     for (int32_t i = mMainThreadListeners.Length() - 1; i >= 0; --i) {
       mMainThreadListeners[i]->NotifyMainThreadStreamFinished();
@@ -579,24 +579,24 @@ protected:
 
     mFinishedNotificationSent = true;
     return true;
   }
 
   // This state is all initialized on the main thread but
   // otherwise modified only on the media graph thread.
 
-  // Buffered data. The start of the buffer corresponds to mBufferStartTime.
+  // Buffered data. The start of the buffer corresponds to mTracksStartTime.
   // Conceptually the buffer contains everything this stream has ever played,
   // but we forget some prefix of the buffered data to bound the space usage.
-  StreamBuffer mBuffer;
+  StreamTracks mTracks;
   // The time when the buffered data could be considered to have started playing.
   // This increases over time to account for time the stream was blocked before
   // mCurrentTime.
-  GraphTime mBufferStartTime;
+  GraphTime mTracksStartTime;
 
   // Client-set volume of this stream
   struct AudioOutput {
     explicit AudioOutput(void* aKey) : mKey(aKey), mVolume(1.0f) {}
     void* mKey;
     float mVolume;
   };
   nsTArray<AudioOutput> mAudioOutputs;
@@ -605,17 +605,17 @@ protected:
   // with a different frame id.
   VideoFrame mLastPlayedVideoFrame;
   nsTArray<RefPtr<MediaStreamListener> > mListeners;
   nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
   nsTArray<TrackID> mDisabledTrackIDs;
 
   // GraphTime at which this stream starts blocking.
   // This is only valid up to mStateComputedTime. The stream is considered to
-  // have not been blocked before mCurrentTime (its mBufferStartTime is increased
+  // have not been blocked before mCurrentTime (its mTracksStartTime is increased
   // as necessary to account for that time instead).
   GraphTime mStartBlocking;
 
   // MediaInputPorts to which this is connected
   nsTArray<MediaInputPort*> mConsumers;
 
   // Where audio output is going. There is one AudioOutputStream per
   // audio track.
@@ -751,17 +751,17 @@ public:
    * Call after a series of AddTrack or AddAudioTrack calls to implement
    * any pending track adds.
    */
   void FinishAddTracks();
 
   /**
    * Find track by track id.
    */
-  StreamBuffer::Track* FindTrack(TrackID aID);
+  StreamTracks::Track* FindTrack(TrackID aID);
 
   /**
    * Append media data to a track. Ownership of aSegment remains with the caller,
    * but aSegment is emptied.
    * Returns false if the data was not appended because no such track exists
    * or the stream was already finished.
    */
   bool AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment = nullptr);
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -342,17 +342,17 @@ public:
    */
   StreamTime PlayAudio(MediaStream* aStream);
   /**
    * Set the correct current video frame for stream aStream.
    */
   void PlayVideo(MediaStream* aStream);
   /**
    * No more data will be forthcoming for aStream. The stream will end
-   * at the current buffer end point. The StreamBuffer's tracks must be
+   * at the current buffer end point. The StreamTracks's tracks must be
    * explicitly set to finished by the caller.
    */
   void FinishStream(MediaStream* aStream);
   /**
    * Compute how much stream data we would like to buffer for aStream.
    */
   StreamTime GetDesiredBufferEnd(MediaStream* aStream);
   /**
--- a/dom/media/MediaStreamTrack.h
+++ b/dom/media/MediaStreamTrack.h
@@ -3,17 +3,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MEDIASTREAMTRACK_H_
 #define MEDIASTREAMTRACK_H_
 
 #include "mozilla/DOMEventTargetHelper.h"
 #include "nsID.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "MediaTrackConstraints.h"
 
 namespace mozilla {
 
 class DOMMediaStream;
 
 namespace dom {
 
new file mode 100644
--- /dev/null
+++ b/dom/media/StreamTracks.cpp
@@ -0,0 +1,117 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "StreamTracks.h"
+#include "mozilla/Logging.h"
+#include <algorithm>
+
+namespace mozilla {
+
+extern LazyLogModule gMediaStreamGraphLog;
+#define STREAM_LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
+
+#ifdef DEBUG
+void
+StreamTracks::DumpTrackInfo() const
+{
+  STREAM_LOG(LogLevel::Info, ("DumpTracks: mTracksKnownTime %lld", mTracksKnownTime));
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (track->IsEnded()) {
+      STREAM_LOG(LogLevel::Info, ("Track[%d] %d: ended", i, track->GetID()));
+    } else {
+      STREAM_LOG(LogLevel::Info, ("Track[%d] %d: %lld", i, track->GetID(),
+                                 track->GetEnd()));
+    }
+  }
+}
+#endif
+
+StreamTime
+StreamTracks::GetEnd() const
+{
+  StreamTime t = mTracksKnownTime;
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (!track->IsEnded()) {
+      t = std::min(t, track->GetEnd());
+    }
+  }
+  return t;
+}
+
+StreamTime
+StreamTracks::GetAllTracksEnd() const
+{
+  if (mTracksKnownTime < STREAM_TIME_MAX) {
+    // A track might be added.
+    return STREAM_TIME_MAX;
+  }
+  StreamTime t = 0;
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (!track->IsEnded()) {
+      return STREAM_TIME_MAX;
+    }
+    t = std::max(t, track->GetEnd());
+  }
+  return t;
+}
+
+StreamTracks::Track*
+StreamTracks::FindTrack(TrackID aID)
+{
+  if (aID == TRACK_NONE || mTracks.IsEmpty()) {
+    return nullptr;
+  }
+
+  // The tracks are sorted by ID. We can use a binary search.
+
+  uint32_t left = 0, right = mTracks.Length() - 1;
+  while (left <= right) {
+    uint32_t middle = (left + right) / 2;
+    if (mTracks[middle]->GetID() == aID) {
+      return mTracks[middle];
+    }
+
+    if (mTracks[middle]->GetID() > aID) {
+      if (middle == 0) {
+        break;
+      }
+
+      right = middle - 1;
+    } else {
+      left = middle + 1;
+    }
+  }
+
+  return nullptr;
+}
+
+void
+StreamTracks::ForgetUpTo(StreamTime aTime)
+{
+  // Only prune if there is a reasonable chunk (50ms @ 48kHz) to forget, so we
+  // don't spend too much time pruning segments.
+  const StreamTime minChunkSize = 2400;
+  if (aTime < mForgottenTime + minChunkSize) {
+    return;
+  }
+  mForgottenTime = aTime;
+
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    Track* track = mTracks[i];
+    if (track->IsEnded() && track->GetEnd() <= aTime) {
+      mTracks.RemoveElementAt(i);
+      mTracksDirty = true;
+      --i;
+      continue;
+    }
+    StreamTime forgetTo = std::min(track->GetEnd() - 1, aTime);
+    track->ForgetUpTo(forgetTo);
+  }
+}
+
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/dom/media/StreamTracks.h
@@ -0,0 +1,339 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_STREAMTRACKS_H_
+#define MOZILLA_STREAMTRACKS_H_
+
+#include "MediaSegment.h"
+#include "nsAutoPtr.h"
+
+namespace mozilla {
+
+/**
+ * Unique ID for track within a StreamTracks. Tracks from different
+ * StreamTracks objects may have the same ID; this matters when appending them,
+ * since tracks with the same ID are matched. Only IDs greater than 0 are allowed.
+ */
+typedef int32_t TrackID;
+const TrackID TRACK_NONE = 0;
+const TrackID TRACK_INVALID = -1;
+const TrackID TRACK_ANY = -2;
+
+inline TrackTicks RateConvertTicksRoundDown(TrackRate aOutRate,
+                                            TrackRate aInRate,
+                                            TrackTicks aTicks)
+{
+  NS_ASSERTION(0 < aOutRate && aOutRate <= TRACK_RATE_MAX, "Bad out rate");
+  NS_ASSERTION(0 < aInRate && aInRate <= TRACK_RATE_MAX, "Bad in rate");
+  NS_WARN_IF_FALSE(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks"); // bug 957691
+  return (aTicks * aOutRate) / aInRate;
+}
+inline TrackTicks RateConvertTicksRoundUp(TrackRate aOutRate,
+                                          TrackRate aInRate, TrackTicks aTicks)
+{
+  NS_ASSERTION(0 < aOutRate && aOutRate <= TRACK_RATE_MAX, "Bad out rate");
+  NS_ASSERTION(0 < aInRate && aInRate <= TRACK_RATE_MAX, "Bad in rate");
+  NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks");
+  return (aTicks * aOutRate + aInRate - 1) / aInRate;
+}
+
+/**
+ * This object contains the decoded data for a stream's tracks.
+ * A StreamTracks can be appended to. Logically a StreamTracks only gets longer,
+ * but we also have the ability to "forget" data before a certain time that
+ * we know won't be used again. (We prune in reasonably-sized chunks internally.)
+ *
+ * A StreamTracks object should only be used from one thread at a time.
+ *
+ * A StreamTracks has a set of tracks that can be of arbitrary types ---
+ * the data for each track is a MediaSegment. The set of tracks can vary
+ * over the timeline of the StreamTracks.
+ */
+class StreamTracks
+{
+public:
+  /**
+   * Every track has a start time --- when it started in the StreamTracks.
+   * It has an end flag; when false, no end point is known; when true,
+   * the track ends when the data we have for the track runs out.
+   * Tracks have a unique ID assigned at creation. This allows us to identify
+   * the same track across StreamTracks objects. A StreamTracks should never have
+   * two tracks with the same ID (even if they don't overlap in time).
+   * TODO Tracks can also be enabled and disabled over time.
+   * Takes ownership of aSegment.
+   */
+  class Track final
+  {
+    Track(TrackID aID, StreamTime aStart, MediaSegment* aSegment)
+      : mStart(aStart),
+        mSegment(aSegment),
+        mID(aID),
+        mEnded(false)
+    {
+      MOZ_COUNT_CTOR(Track);
+
+      NS_ASSERTION(aID > TRACK_NONE, "Bad track ID");
+      NS_ASSERTION(0 <= aStart && aStart <= aSegment->GetDuration(), "Bad start position");
+    }
+
+  public:
+    ~Track()
+    {
+      MOZ_COUNT_DTOR(Track);
+    }
+
+    template <class T> T* Get() const
+    {
+      if (mSegment->GetType() == T::StaticType()) {
+        return static_cast<T*>(mSegment.get());
+      }
+      return nullptr;
+    }
+
+    MediaSegment* GetSegment() const { return mSegment; }
+    TrackID GetID() const { return mID; }
+    bool IsEnded() const { return mEnded; }
+    StreamTime GetStart() const { return mStart; }
+    StreamTime GetEnd() const { return mSegment->GetDuration(); }
+    MediaSegment::Type GetType() const { return mSegment->GetType(); }
+
+    void SetEnded() { mEnded = true; }
+    void AppendFrom(Track* aTrack)
+    {
+      NS_ASSERTION(!mEnded, "Can't append to ended track");
+      NS_ASSERTION(aTrack->mID == mID, "IDs must match");
+      NS_ASSERTION(aTrack->mStart == 0, "Source track must start at zero");
+      NS_ASSERTION(aTrack->mSegment->GetType() == GetType(), "Track types must match");
+
+      mSegment->AppendFrom(aTrack->mSegment);
+      mEnded = aTrack->mEnded;
+    }
+    MediaSegment* RemoveSegment()
+    {
+      return mSegment.forget();
+    }
+    void ForgetUpTo(StreamTime aTime)
+    {
+      mSegment->ForgetUpTo(aTime);
+    }
+    void FlushAfter(StreamTime aNewEnd)
+    {
+      // Forget everything after a given endpoint
+      // a specified amount
+      mSegment->FlushAfter(aNewEnd);
+    }
+
+    size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+    {
+      size_t amount = aMallocSizeOf(this);
+      if (mSegment) {
+        amount += mSegment->SizeOfIncludingThis(aMallocSizeOf);
+      }
+      return amount;
+    }
+
+  private:
+    friend class StreamTracks;
+
+    // Start offset is in ticks at rate mRate
+    StreamTime mStart;
+    // The segment data starts at the start of the owning StreamTracks, i.e.,
+    // there's mStart silence/no video at the beginning.
+    nsAutoPtr<MediaSegment> mSegment;
+    // Unique ID
+    TrackID mID;
+    // True when the track ends with the data in mSegment
+    bool mEnded;
+  };
+
+  class MOZ_STACK_CLASS CompareTracksByID final
+  {
+  public:
+    bool Equals(Track* aA, Track* aB) const {
+      return aA->GetID() == aB->GetID();
+    }
+    bool LessThan(Track* aA, Track* aB) const {
+      return aA->GetID() < aB->GetID();
+    }
+  };
+
+  StreamTracks()
+    : mGraphRate(0)
+    , mTracksKnownTime(0)
+    , mForgottenTime(0)
+    , mTracksDirty(false)
+#ifdef DEBUG
+    , mGraphRateIsSet(false)
+#endif
+  {
+    MOZ_COUNT_CTOR(StreamTracks);
+  }
+  ~StreamTracks()
+  {
+    MOZ_COUNT_DTOR(StreamTracks);
+  }
+
+  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+  {
+    size_t amount = 0;
+    amount += mTracks.ShallowSizeOfExcludingThis(aMallocSizeOf);
+    for (size_t i = 0; i < mTracks.Length(); i++) {
+      amount += mTracks[i]->SizeOfIncludingThis(aMallocSizeOf);
+    }
+    return amount;
+  }
+
+  /**
+   * Initialize the graph rate for use in calculating StreamTimes from track
+   * ticks.  Called when a MediaStream's graph pointer is initialized.
+   */
+  void InitGraphRate(TrackRate aGraphRate)
+  {
+    mGraphRate = aGraphRate;
+#ifdef DEBUG
+    MOZ_ASSERT(!mGraphRateIsSet);
+    mGraphRateIsSet = true;
+#endif
+  }
+
+  TrackRate GraphRate() const
+  {
+    MOZ_ASSERT(mGraphRateIsSet);
+    return mGraphRate;
+  }
+
+  /**
+   * Takes ownership of aSegment. Don't do this while iterating, or while
+   * holding a Track reference.
+   * aSegment must have aStart worth of null data.
+   */
+  Track& AddTrack(TrackID aID, StreamTime aStart, MediaSegment* aSegment)
+  {
+    NS_ASSERTION(!FindTrack(aID), "Track with this ID already exists");
+
+    Track* track = new Track(aID, aStart, aSegment);
+    mTracks.InsertElementSorted(track, CompareTracksByID());
+    mTracksDirty = true;
+
+    if (mTracksKnownTime == STREAM_TIME_MAX) {
+      // There exists code like
+      // http://mxr.mozilla.org/mozilla-central/source/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp?rev=96b197deb91e&mark=1292-1297#1292
+      NS_WARNING("Adding track to StreamTracks that should have no more tracks");
+    } else {
+      NS_ASSERTION(mTracksKnownTime <= aStart, "Start time too early");
+    }
+    return *track;
+  }
+
+  void AdvanceKnownTracksTime(StreamTime aKnownTime)
+  {
+    NS_ASSERTION(aKnownTime >= mTracksKnownTime, "Can't move tracks-known time earlier");
+    mTracksKnownTime = aKnownTime;
+  }
+
+  /**
+   * The end time for the StreamTracks is the latest time for which we have
+   * data for all tracks that haven't ended by that time.
+   */
+  StreamTime GetEnd() const;
+
+  /**
+   * Returns the earliest time >= 0 at which all tracks have ended
+   * and all their data has been played out and no new tracks can be added,
+   * or STREAM_TIME_MAX if there is no such time.
+   */
+  StreamTime GetAllTracksEnd() const;
+
+#ifdef DEBUG
+  void DumpTrackInfo() const;
+#endif
+
+  Track* FindTrack(TrackID aID);
+
+  class MOZ_STACK_CLASS TrackIter final
+  {
+  public:
+    /**
+     * Iterate through the tracks of aBuffer in order of ID.
+     */
+    explicit TrackIter(const StreamTracks& aBuffer) :
+      mBuffer(&aBuffer.mTracks), mIndex(0), mMatchType(false) {}
+    /**
+     * Iterate through the tracks of aBuffer with type aType, in order of ID.
+     */
+    TrackIter(const StreamTracks& aBuffer, MediaSegment::Type aType) :
+      mBuffer(&aBuffer.mTracks), mIndex(0), mType(aType), mMatchType(true) { FindMatch(); }
+    bool IsEnded() { return mIndex >= mBuffer->Length(); }
+    void Next()
+    {
+      ++mIndex;
+      FindMatch();
+    }
+    Track* get() { return mBuffer->ElementAt(mIndex); }
+    Track& operator*() { return *mBuffer->ElementAt(mIndex); }
+    Track* operator->() { return mBuffer->ElementAt(mIndex); }
+  private:
+    void FindMatch()
+    {
+      if (!mMatchType)
+        return;
+      while (mIndex < mBuffer->Length() &&
+             mBuffer->ElementAt(mIndex)->GetType() != mType) {
+        ++mIndex;
+      }
+    }
+
+    const nsTArray<nsAutoPtr<Track> >* mBuffer;
+    uint32_t mIndex;
+    MediaSegment::Type mType;
+    bool mMatchType;
+  };
+  friend class TrackIter;
+
+  /**
+   * Forget stream data before aTime; they will no longer be needed.
+   * Also can forget entire tracks that have ended at or before aTime.
+   * Can't be used to forget beyond GetEnd().
+   */
+  void ForgetUpTo(StreamTime aTime);
+  /**
+   * Returns the latest time passed to ForgetUpTo.
+   */
+  StreamTime GetForgottenDuration()
+  {
+    return mForgottenTime;
+  }
+
+  bool GetAndResetTracksDirty()
+  {
+    if (!mTracksDirty) {
+      return false;
+    }
+
+    mTracksDirty = false;
+    return true;
+  }
+
+protected:
+  TrackRate mGraphRate; // StreamTime per second
+  // Any new tracks added will start at or after this time. In other words, the track
+  // list is complete and correct for all times less than this time.
+  StreamTime mTracksKnownTime;
+  StreamTime mForgottenTime;
+
+private:
+  // All known tracks for this StreamTracks
+  nsTArray<nsAutoPtr<Track>> mTracks;
+  bool mTracksDirty;
+
+#ifdef DEBUG
+  bool mGraphRateIsSet;
+#endif
+};
+
+} // namespace mozilla
+
+#endif /* MOZILLA_STREAMTRACKS_H_ */
+
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -82,24 +82,24 @@ TrackUnionStream::TrackUnionStream(DOMMe
         // not just that it's finishing when all its queued data eventually runs
         // out.
         allFinished = false;
       }
       if (!stream->HasCurrentData()) {
         allHaveCurrentData = false;
       }
       bool trackAdded = false;
-      for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer());
+      for (StreamTracks::TrackIter tracks(stream->GetStreamTracks());
            !tracks.IsEnded(); tracks.Next()) {
         bool found = false;
         for (uint32_t j = 0; j < mTrackMap.Length(); ++j) {
           TrackMapEntry* map = &mTrackMap[j];
           if (map->mInputPort == mInputs[i] && map->mInputTrackID == tracks->GetID()) {
-            bool trackFinished;
-            StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
+            bool trackFinished = false;
+            StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
             found = true;
             if (!outputTrack || outputTrack->IsEnded() ||
                 !mInputs[i]->PassTrackThrough(tracks->GetID())) {
               trackFinished = true;
             } else {
               CopyTrackData(tracks.get(), j, aFrom, aTo, &trackFinished);
             }
             mappedTracksFinished[j] = trackFinished;
@@ -133,25 +133,25 @@ TrackUnionStream::TrackUnionStream(DOMMe
       }
     }
     if (allFinished && mAutofinish && (aFlags & ALLOW_FINISH)) {
       // All streams have finished and won't add any more tracks, and
       // all our tracks have actually finished and been removed from our map,
       // so we're finished now.
       FinishOnGraphThread();
     } else {
-      mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
+      mTracks.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
     }
     if (allHaveCurrentData) {
       // We can make progress if we're not blocked
       mHasCurrentData = true;
     }
   }
 
-  uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
+  uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamTracks::Track* aTrack,
                     GraphTime aFrom)
   {
     TrackID id = aTrack->GetID();
     if (id > mNextAvailableTrackID &&
        mUsedTracks.BinaryIndexOf(id) == mUsedTracks.NoIndex) {
       // Input id available. Mark it used in mUsedTracks.
       mUsedTracks.InsertElementSorted(id);
     } else {
@@ -178,18 +178,18 @@ TrackUnionStream::TrackUnionStream(DOMMe
     for (uint32_t j = 0; j < mListeners.Length(); ++j) {
       MediaStreamListener* l = mListeners[j];
       l->NotifyQueuedTrackChanges(Graph(), id, outputStart,
                                   MediaStreamListener::TRACK_EVENT_CREATED,
                                   *segment,
                                   aPort->GetSource(), aTrack->GetID());
     }
     segment->AppendNullData(outputStart);
-    StreamBuffer::Track* track =
-      &mBuffer.AddTrack(id, outputStart, segment.forget());
+    StreamTracks::Track* track =
+      &mTracks.AddTrack(id, outputStart, segment.forget());
     STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p adding track %d for input stream %p track %d, start ticks %lld",
                               this, id, aPort->GetSource(), aTrack->GetID(),
                               (long long)outputStart));
 
     TrackMapEntry* map = mTrackMap.AppendElement();
     map->mEndOfConsumedInputTicks = 0;
     map->mEndOfLastInputIntervalInInputStream = -1;
     map->mEndOfLastInputIntervalInOutputStream = -1;
@@ -197,17 +197,17 @@ TrackUnionStream::TrackUnionStream(DOMMe
     map->mInputTrackID = aTrack->GetID();
     map->mOutputTrackID = track->GetID();
     map->mSegment = aTrack->GetSegment()->CreateEmptyClone();
     return mTrackMap.Length() - 1;
   }
 
   void TrackUnionStream::EndTrack(uint32_t aIndex)
   {
-    StreamBuffer::Track* outputTrack = mBuffer.FindTrack(mTrackMap[aIndex].mOutputTrackID);
+    StreamTracks::Track* outputTrack = mTracks.FindTrack(mTrackMap[aIndex].mOutputTrackID);
     if (!outputTrack || outputTrack->IsEnded())
       return;
     STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p ending track %d", this, outputTrack->GetID()));
     for (uint32_t j = 0; j < mListeners.Length(); ++j) {
       MediaStreamListener* l = mListeners[j];
       StreamTime offset = outputTrack->GetSegment()->GetDuration();
       nsAutoPtr<MediaSegment> segment;
       segment = outputTrack->GetSegment()->CreateEmptyClone();
@@ -215,22 +215,22 @@ TrackUnionStream::TrackUnionStream(DOMMe
                                   MediaStreamListener::TRACK_EVENT_ENDED,
                                   *segment,
                                   mTrackMap[aIndex].mInputPort->GetSource(),
                                   mTrackMap[aIndex].mInputTrackID);
     }
     outputTrack->SetEnded();
   }
 
-  void TrackUnionStream::CopyTrackData(StreamBuffer::Track* aInputTrack,
+  void TrackUnionStream::CopyTrackData(StreamTracks::Track* aInputTrack,
                      uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
                      bool* aOutputTrackFinished)
   {
     TrackMapEntry* map = &mTrackMap[aMapIndex];
-    StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
+    StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
     MOZ_ASSERT(outputTrack && !outputTrack->IsEnded(), "Can't copy to ended track");
 
     MediaSegment* segment = map->mSegment;
     MediaStream* source = map->mInputPort->GetSource();
 
     GraphTime next;
     *aOutputTrackFinished = false;
     for (GraphTime t = aFrom; t < aTo; t = next) {
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -34,29 +34,29 @@ protected:
     // mEndOfLastInputIntervalInOutputStream is the timestamp for the end of the
     // previous interval which was unblocked for both the input and output
     // stream, in the output stream's timeline, or -1 if there wasn't one.
     StreamTime mEndOfLastInputIntervalInOutputStream;
     MediaInputPort* mInputPort;
     // We keep track IDs instead of track pointers because
     // tracks can be removed without us being notified (e.g.
     // when a finished track is forgotten.) When we need a Track*,
-    // we call StreamBuffer::FindTrack, which will return null if
+    // we call StreamTracks::FindTrack, which will return null if
     // the track has been deleted.
     TrackID mInputTrackID;
     TrackID mOutputTrackID;
     nsAutoPtr<MediaSegment> mSegment;
   };
 
   // Add the track to this stream, retaining its TrackID if it has never
   // been previously used in this stream, allocating a new TrackID otherwise.
-  uint32_t AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
+  uint32_t AddTrack(MediaInputPort* aPort, StreamTracks::Track* aTrack,
                     GraphTime aFrom);
   void EndTrack(uint32_t aIndex);
-  void CopyTrackData(StreamBuffer::Track* aInputTrack,
+  void CopyTrackData(StreamTracks::Track* aInputTrack,
                      uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
                      bool* aOutputTrackFinished);
 
   nsTArray<TrackMapEntry> mTrackMap;
 
   // The next available TrackID, starting at 1 and progressing upwards.
   // All TrackIDs in [1, mNextAvailableTrackID) have implicitly been used.
   TrackID mNextAvailableTrackID;
--- a/dom/media/encoder/TrackEncoder.h
+++ b/dom/media/encoder/TrackEncoder.h
@@ -5,17 +5,17 @@
 
 #ifndef TrackEncoder_h_
 #define TrackEncoder_h_
 
 #include "mozilla/ReentrantMonitor.h"
 
 #include "AudioSegment.h"
 #include "EncodedFrameContainer.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "TrackMetadataBase.h"
 #include "VideoSegment.h"
 #include "MediaStreamGraph.h"
 
 namespace mozilla {
 
 /**
  * Base class of AudioTrackEncoder and VideoTrackEncoder. Lifetimes managed by
--- a/dom/media/moz.build
+++ b/dom/media/moz.build
@@ -130,17 +130,17 @@ EXPORTS += [
     'MediaTrackList.h',
     'MP3Decoder.h',
     'MP3Demuxer.h',
     'MP3FrameParser.h',
     'nsIDocumentActivity.h',
     'RtspMediaResource.h',
     'SelfRef.h',
     'SharedBuffer.h',
-    'StreamBuffer.h',
+    'StreamTracks.h',
     'ThreadPoolCOMListener.h',
     'TimeUnits.h',
     'TrackUnionStream.h',
     'VideoFrameContainer.h',
     'VideoSegment.h',
     'VideoUtils.h',
     'VorbisUtils.h',
     'XiphExtradata.h',
@@ -221,17 +221,17 @@ UNIFIED_SOURCES += [
     'MediaStreamTrack.cpp',
     'MediaTimer.cpp',
     'MediaTrack.cpp',
     'MediaTrackList.cpp',
     'MP3Decoder.cpp',
     'MP3Demuxer.cpp',
     'MP3FrameParser.cpp',
     'RtspMediaResource.cpp',
-    'StreamBuffer.cpp',
+    'StreamTracks.cpp',
     'TextTrack.cpp',
     'TextTrackCue.cpp',
     'TextTrackCueList.cpp',
     'TextTrackList.cpp',
     'TextTrackRegion.cpp',
     'TrackUnionStream.cpp',
     'VideoFrameContainer.cpp',
     'VideoPlaybackQuality.cpp',
--- a/dom/media/webaudio/AudioNodeExternalInputStream.cpp
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.cpp
@@ -134,19 +134,19 @@ AudioNodeExternalInputStream::ProcessInp
     return;
   }
 
   MOZ_ASSERT(mInputs.Length() == 1);
 
   MediaStream* source = mInputs[0]->GetSource();
   nsAutoTArray<AudioSegment,1> audioSegments;
   uint32_t inputChannels = 0;
-  for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO);
+  for (StreamTracks::TrackIter tracks(source->mTracks, MediaSegment::AUDIO);
        !tracks.IsEnded(); tracks.Next()) {
-    const StreamBuffer::Track& inputTrack = *tracks;
+    const StreamTracks::Track& inputTrack = *tracks;
     if (!mInputs[0]->PassTrackThrough(tracks->GetID())) {
       continue;
     }
 
     const AudioSegment& inputSegment =
         *static_cast<AudioSegment*>(inputTrack.GetSegment());
     if (inputSegment.IsNull()) {
       continue;
--- a/dom/media/webaudio/AudioNodeStream.cpp
+++ b/dom/media/webaudio/AudioNodeStream.cpp
@@ -379,19 +379,19 @@ AudioNodeStream::ComputedNumberOfChannel
 
 class AudioNodeStream::AdvanceAndResumeMessage final : public ControlMessage {
 public:
   AdvanceAndResumeMessage(AudioNodeStream* aStream, StreamTime aAdvance) :
     ControlMessage(aStream), mAdvance(aAdvance) {}
   void Run() override
   {
     auto ns = static_cast<AudioNodeStream*>(mStream);
-    ns->mBufferStartTime -= mAdvance;
+    ns->mTracksStartTime -= mAdvance;
 
-    StreamBuffer::Track* track = ns->EnsureTrack(AUDIO_TRACK);
+    StreamTracks::Track* track = ns->EnsureTrack(AUDIO_TRACK);
     track->Get<AudioSegment>()->AppendNullData(mAdvance);
 
     ns->GraphImpl()->DecrementSuspendCount(mStream);
   }
 private:
   StreamTime mAdvance;
 };
 
@@ -624,19 +624,19 @@ AudioNodeStream::ProduceOutputBeforeInpu
       mLastChunks[0].SetNull(WEBAUDIO_BLOCK_SIZE);
     }
   }
 }
 
 void
 AudioNodeStream::AdvanceOutputSegment()
 {
-  StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
+  StreamTracks::Track* track = EnsureTrack(AUDIO_TRACK);
   // No more tracks will be coming
-  mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
+  mTracks.AdvanceKnownTracksTime(STREAM_TIME_MAX);
 
   AudioSegment* segment = track->Get<AudioSegment>();
 
   if (!mLastChunks[0].IsNull()) {
     segment->AppendAndConsumeChunk(mLastChunks[0].AsMutableChunk());
   } else {
     segment->AppendNullData(mLastChunks[0].GetDuration());
   }
@@ -649,17 +649,17 @@ AudioNodeStream::AdvanceOutputSegment()
     l->NotifyQueuedTrackChanges(Graph(), AUDIO_TRACK,
                                 segment->GetDuration(), 0, tmpSegment);
   }
 }
 
 void
 AudioNodeStream::FinishOutput()
 {
-  StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
+  StreamTracks::Track* track = EnsureTrack(AUDIO_TRACK);
   track->SetEnded();
 
   for (uint32_t j = 0; j < mListeners.Length(); ++j) {
     MediaStreamListener* l = mListeners[j];
     AudioSegment emptySegment;
     l->NotifyQueuedTrackChanges(Graph(), AUDIO_TRACK,
                                 track->GetSegment()->GetDuration(),
                                 MediaStreamListener::TRACK_EVENT_ENDED, emptySegment);
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -11,17 +11,17 @@
 #include "DOMMediaStream.h"
 #include "nsComponentManagerUtils.h"
 #include "mozilla/Monitor.h"
 
 #include "VideoUtils.h"
 #include "MediaEngine.h"
 #include "VideoSegment.h"
 #include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "MediaStreamGraph.h"
 #include "MediaTrackConstraints.h"
 
 namespace mozilla {
 
 namespace layers {
 class ImageContainer;
 } // namespace layers
@@ -132,17 +132,17 @@ public:
                   int32_t aPlayoutDelay) override { return NS_OK; };
   void AppendToSegment(AudioSegment& aSegment, TrackTicks aSamples);
   void NotifyPull(MediaStreamGraph* aGraph,
                   SourceMediaStream *aSource,
                   TrackID aId,
                   StreamTime aDesiredTime) override
   {
 #ifdef DEBUG
-    StreamBuffer::Track* data = aSource->FindTrack(aId);
+    StreamTracks::Track* data = aSource->FindTrack(aId);
     NS_WARN_IF_FALSE(!data || data->IsEnded() ||
                      aDesiredTime <= aSource->GetEndOfAppendedData(aId),
                      "MediaEngineDefaultAudioSource data underrun");
 #endif
   }
 
   bool IsFake() override {
     return true;
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -19,17 +19,17 @@
 #include "DOMMediaStream.h"
 #include "nsDirectoryServiceDefs.h"
 #include "nsComponentManagerUtils.h"
 
 #include "VideoUtils.h"
 #include "MediaEngineCameraVideoSource.h"
 #include "VideoSegment.h"
 #include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "MediaStreamGraph.h"
 
 #include "MediaEngineWrapper.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 
 // WebRTC library includes follow
 #include "webrtc/common.h"
 #include "webrtc/video_engine/include/vie_capture.h"
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -19,17 +19,17 @@
 #include "nsDirectoryServiceDefs.h"
 #include "nsComponentManagerUtils.h"
 #include "nsRefPtrHashtable.h"
 
 #include "VideoUtils.h"
 #include "MediaEngineCameraVideoSource.h"
 #include "VideoSegment.h"
 #include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "MediaStreamGraph.h"
 
 #include "MediaEngineWrapper.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 // WebRTC library includes follow
 #include "webrtc/common.h"
 // Audio Engine
 #include "webrtc/voice_engine/include/voe_base.h"
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
@@ -26,17 +26,17 @@
 #include "nsIThread.h"
 
 #include "signaling/src/jsep/JsepSession.h"
 #include "signaling/src/jsep/JsepSessionImpl.h"
 #include "signaling/src/sdp/SdpMediaSection.h"
 
 #include "mozilla/ErrorResult.h"
 #include "mozilla/dom/PeerConnectionImplEnumsBinding.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 
 #if !defined(MOZILLA_EXTERNAL_LINKAGE)
 #include "mozilla/TimeStamp.h"
 #include "mozilla/net/DataChannel.h"
 #include "VideoUtils.h"
 #include "VideoSegment.h"
 #include "mozilla/dom/RTCStatsReportBinding.h"
 #include "nsIPrincipal.h"
--- a/media/webrtc/signaling/test/FakeMediaStreams.h
+++ b/media/webrtc/signaling/test/FakeMediaStreams.h
@@ -16,17 +16,17 @@
 #include "nsIComponentRegistrar.h"
 #include "nsISupportsImpl.h"
 #include "nsServiceManagerUtils.h"
 
 // #includes from MediaStream.h
 #include "mozilla/Mutex.h"
 #include "AudioSegment.h"
 #include "MediaSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "nsTArray.h"
 #include "nsIRunnable.h"
 #include "nsISupportsImpl.h"
 
 class nsIDOMWindow;
 
 namespace mozilla {
    class MediaStreamGraphImpl;
--- a/media/webrtc/signaling/test/FakePCObserver.h
+++ b/media/webrtc/signaling/test/FakePCObserver.h
@@ -9,17 +9,17 @@
 #include "nsITimer.h"
 #include "nsComponentManagerUtils.h"
 #include "nsIComponentManager.h"
 #include "nsIComponentRegistrar.h"
 
 #include "mozilla/Mutex.h"
 #include "AudioSegment.h"
 #include "MediaSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
 #include "nsTArray.h"
 #include "nsIRunnable.h"
 #include "nsISupportsImpl.h"
 #include "mozilla/dom/PeerConnectionObserverEnumsBinding.h"
 #include "PeerConnectionImpl.h"
 #include "nsWeakReference.h"
 
 namespace mozilla {