Bug 1350241 -Part2: Create a customized player based on ExoPlayer and inject customized {Audio,Video}Renderers as the source of HLS demuxed samples. draft
authorKilik Kuo <kikuo@mozilla.com>
Thu, 25 May 2017 20:48:43 +0800
changeset 584399 c4bdd6c56c7928dc64aa9375ca8459f7f53bf32d
parent 584398 581938b4909eec326e2abb09009e798ad6c3525c
child 584412 7111d29eff59c7576a276a23cf9f8e508f19598f
push id60726
push userbmo:kikuo@mozilla.com
push dateThu, 25 May 2017 13:00:20 +0000
bugs1350241
milestone55.0a1
Bug 1350241 -Part2: Create a customized player based on ExoPlayer and inject customized {Audio,Video}Renderers as the source of HLS demuxed samples. MozReview-Commit-ID: CAyaxKUoJzm
mobile/android/base/moz.build
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsAudioRenderer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsPlayer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsRendererBase.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsVideoRenderer.java
mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/Utils.java
--- a/mobile/android/base/moz.build
+++ b/mobile/android/base/moz.build
@@ -460,18 +460,23 @@ gvjar.sources += [geckoview_thirdparty_s
     'java/com/googlecode/eyesfree/braille/selfbraille/ISelfBrailleService.java',
     'java/com/googlecode/eyesfree/braille/selfbraille/SelfBrailleClient.java',
     'java/com/googlecode/eyesfree/braille/selfbraille/WriteData.java',
 ]]
 
 if CONFIG['MOZ_ANDROID_HLS_SUPPORT']:
     gvjar.sources += [geckoview_source_dir + 'java/org/mozilla/gecko/' + x for x in [
         'media/GeckoAudioInfo.java',
+        'media/GeckoHlsAudioRenderer.java',
+        'media/GeckoHlsPlayer.java',
+        'media/GeckoHlsRendererBase.java',
         'media/GeckoHlsSample.java',
+        'media/GeckoHlsVideoRenderer.java',
         'media/GeckoVideoInfo.java',
+        'media/Utils.java',
     ]]
 
 
 gvjar.extra_jars += [
     CONFIG['ANDROID_SUPPORT_ANNOTATIONS_JAR_LIB'],
     CONFIG['ANDROID_SUPPORT_V4_AAR_LIB'],
     CONFIG['ANDROID_SUPPORT_V4_AAR_INTERNAL_LIB'],
     'constants.jar',
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsAudioRenderer.java
@@ -0,0 +1,162 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodec.CryptoInfo;
+import android.os.Handler;
+import android.util.Log;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
+import com.google.android.exoplayer2.util.MimeTypes;
+
+import java.nio.ByteBuffer;
+
+import org.mozilla.gecko.AppConstants.Versions;
+
+public class GeckoHlsAudioRenderer extends GeckoHlsRendererBase {
+    public GeckoHlsAudioRenderer(GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
+        super(C.TRACK_TYPE_AUDIO, eventDispatcher);
+        assertTrue(Versions.feature16Plus);
+        LOGTAG = getClass().getSimpleName();
+        DEBUG = false;
+    }
+
+    @Override
+    public final int supportsFormat(Format format) {
+        /*
+         * FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
+         *                               formats with the same mime type, but
+         *                               the properties of the format exceed
+         *                               the renderer's capability.
+         * FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
+         *                              renderer for formats of the same
+         *                              top-level type, but is not capable of
+         *                              rendering the format or any other format
+         *                              with the same mime type because the
+         *                              sub-type is not supported.
+         * FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
+         *                           the format, either because it does not support
+         *                           the format's top-level type, or because it's
+         *                           a specialized renderer for a different mime type.
+         * ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
+         *                         but may suffer a brief discontinuity (~50-100ms)
+         *                         when adaptation occurs.
+         */
+        String mimeType = format.sampleMimeType;
+        if (!MimeTypes.isAudio(mimeType)) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
+        }
+        MediaCodecInfo decoderInfo = null;
+        try {
+            MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
+            decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
+        } catch (MediaCodecUtil.DecoderQueryException e) {
+            Log.e(LOGTAG, e.getMessage());
+        }
+        if (decoderInfo == null) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
+        }
+        /*
+         *  Note : If the code can make it to this place, ExoPlayer assumes
+         *         support for unknown sampleRate and channelCount when
+         *         SDK version is less than 21, otherwise, further check is needed
+         *         if there's no sampleRate/channelCount in format.
+         */
+        boolean decoderCapable = Versions.preLollipop ||
+                                 ((format.sampleRate == Format.NO_VALUE ||
+                                  decoderInfo.isAudioSampleRateSupportedV21(format.sampleRate)) &&
+                                 (format.channelCount == Format.NO_VALUE ||
+                                  decoderInfo.isAudioChannelCountSupportedV21(format.channelCount)));
+        int formatSupport = decoderCapable ?
+            RendererCapabilities.FORMAT_HANDLED :
+            RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
+        return RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | formatSupport;
+    }
+
+    @Override
+    protected final void createInputBuffer() {
+        // We're not able to estimate the size for audio from format. So we rely
+        // on the dynamic allocation mechanism provided in DecoderInputBuffer.
+        mInputBuffer = null;
+    }
+
+    @Override
+    protected void resetRenderer() {
+        mInputBuffer = null;
+        mInitialized = false;
+    }
+
+    @Override
+    protected void handleReconfiguration(DecoderInputBuffer bufferForRead) {
+        // Do nothing
+    }
+
+    @Override
+    protected void handleFormatRead(DecoderInputBuffer bufferForRead) {
+        onInputFormatChanged(mFormatHolder.format);
+    }
+
+    @Override
+    protected void handleEndOfStream(DecoderInputBuffer bufferForRead) {
+        mInputStreamEnded = true;
+        mDemuxedInputSamples.offer(GeckoHlsSample.EOS);
+    }
+
+    @Override
+    protected void handleSamplePreparation(DecoderInputBuffer bufferForRead) {
+        int size = bufferForRead.data.limit();
+        byte[] realData = new byte[size];
+        bufferForRead.data.get(realData, 0, size);
+        ByteBuffer buffer = ByteBuffer.wrap(realData);
+        mInputBuffer = bufferForRead.data;
+        mInputBuffer.clear();
+
+        CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
+        BufferInfo bufferInfo = new BufferInfo();
+        // Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
+        int flags = 0;
+        flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
+        flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+        bufferInfo.set(0, size, bufferForRead.timeUs, flags);
+
+        assertTrue(mFormats.size() >= 0);
+        // We add a new format in the list once format changes, so the formatIndex
+        // should point to the last (latest) format.
+        GeckoHlsSample sample = GeckoHlsSample.create(buffer,
+                                                      bufferInfo,
+                                                      cryptoInfo,
+                                                      mFormats.size() - 1);
+
+        mDemuxedInputSamples.offer(sample);
+
+        if (DEBUG) {
+            Log.d(LOGTAG, "Demuxed sample PTS : " +
+                          sample.info.presentationTimeUs + ", duration :" +
+                          sample.duration + ", formatIndex(" +
+                          sample.formatIndex + "), queue size : " +
+                          mDemuxedInputSamples.size());
+        }
+    }
+
+    @Override
+    protected boolean clearInputSamplesQueue() {
+        if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
+        mDemuxedInputSamples.clear();
+        return true;
+    }
+
+    @Override
+    protected void notifyPlayerInputFormatChanged(Format newFormat) {
+        mPlayerEventDispatcher.onAudioInputFormatChanged(newFormat);
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsPlayer.java
@@ -0,0 +1,659 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.content.Context;
+import android.net.Uri;
+import android.os.Handler;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Surface;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.ExoPlayer;
+import com.google.android.exoplayer2.ExoPlayerFactory;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.TrackGroup;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.source.hls.HlsMediaSource;
+import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
+import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
+import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.upstream.DataSource;
+import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
+import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
+import com.google.android.exoplayer2.upstream.HttpDataSource;
+import com.google.android.exoplayer2.util.Util;
+
+import org.mozilla.gecko.AppConstants;
+import org.mozilla.gecko.GeckoAppShell;
+
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+public class GeckoHlsPlayer implements ExoPlayer.EventListener {
+    private static final String LOGTAG = "GeckoHlsPlayer";
+    private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
+    private static final int MAX_TIMELINE_ITEM_LINES = 3;
+    private static boolean DEBUG = false;
+
+    private DataSource.Factory mMediaDataSourceFactory;
+
+    private Handler mMainHandler;
+    private ExoPlayer mPlayer;
+    private GeckoHlsRendererBase[] mRenderers;
+    private DefaultTrackSelector mTrackSelector;
+    private MediaSource mMediaSource;
+    private ComponentListener mComponentListener;
+    private ComponentEventDispatcher mComponentEventDispatcher;
+
+    private boolean mIsTimelineStatic = false;
+    private long mDurationUs;
+
+    private GeckoHlsVideoRenderer mVRenderer = null;
+    private GeckoHlsAudioRenderer mARenderer = null;
+
+    // Able to control if we only want V/A/V+A tracks from bitstream.
+    private class RendererController {
+        private final boolean mEnableV;
+        private final boolean mEnableA;
+        RendererController(boolean enableVideoRenderer, boolean enableAudioRenderer) {
+            this.mEnableV = enableVideoRenderer;
+            this.mEnableA = enableAudioRenderer;
+        }
+        boolean isVideoRendererEnabled() { return mEnableV; }
+        boolean isAudioRendererEnabled() { return mEnableA; }
+    }
+    private RendererController mRendererController = new RendererController(true, true);
+
+    // Provide statistical information of tracks.
+    private class HlsMediaTracksInfo {
+        private int mNumVideoTracks = 0;
+        private int mNumAudioTracks = 0;
+        private boolean mVideoInfoUpdated = false;
+        private boolean mAudioInfoUpdated = false;
+        HlsMediaTracksInfo(int numVideoTracks, int numAudioTracks) {
+            this.mNumVideoTracks = numVideoTracks;
+            this.mNumAudioTracks = numAudioTracks;
+        }
+        public boolean hasVideo() { return mNumVideoTracks > 0; }
+        public boolean hasAudio() { return mNumAudioTracks > 0; }
+        public int getNumOfVideoTracks() { return mNumVideoTracks; }
+        public int getNumOfAudioTracks() { return mNumAudioTracks; }
+        public void onVideoInfoUpdated() { mVideoInfoUpdated = true; }
+        public void onAudioInfoUpdated() { mAudioInfoUpdated = true; }
+        public boolean videoReady() {
+            return hasVideo() ? mVideoInfoUpdated : true;
+        }
+        public boolean audioReady() {
+            return hasAudio() ? mAudioInfoUpdated : true;
+        }
+    }
+    private HlsMediaTracksInfo mTracksInfo = null;
+
+    private boolean mIsPlayerInitDone = false;
+    private boolean mIsDemuxerInitDone = false;
+    private DemuxerCallbacks mDemuxerCallbacks;
+    private ResourceCallbacks mResourceCallbacks;
+
+    public enum TrackType {
+        UNDEFINED,
+        AUDIO,
+        VIDEO,
+        TEXT,
+    }
+
+    public enum ResourceError {
+        BASE(-100),
+        UNKNOWN(-101),
+        PLAYER(-102),
+        UNSUPPORTED(-103);
+
+        private int mNumVal;
+        private ResourceError(int numVal) {
+            mNumVal = numVal;
+        }
+        public int code() {
+            return mNumVal;
+        }
+    }
+
+    public enum DemuxerError {
+        BASE(-200),
+        UNKNOWN(-201),
+        PLAYER(-202),
+        UNSUPPORTED(-203);
+
+        private int mNumVal;
+        private DemuxerError(int numVal) {
+            mNumVal = numVal;
+        }
+        public int code() {
+            return mNumVal;
+        }
+    }
+
+    public interface DemuxerCallbacks {
+        void onInitialized(boolean hasAudio, boolean hasVideo);
+        void onError(int errorCode);
+    }
+
+    public interface ResourceCallbacks {
+        void onDataArrived();
+        void onError(int errorCode);
+    }
+
+    private static void assertTrue(boolean condition) {
+      if (DEBUG && !condition) {
+        throw new AssertionError("Expected condition to be true");
+      }
+    }
+
+    public void checkInitDone() {
+        assertTrue(mDemuxerCallbacks != null);
+        assertTrue(mTracksInfo != null);
+        if (mIsDemuxerInitDone) {
+            return;
+        }
+        if (DEBUG) {
+            Log.d(LOGTAG, "[checkInitDone] VReady:" + mTracksInfo.videoReady() +
+                    ",AReady:" + mTracksInfo.audioReady() +
+                    ",hasV:" + mTracksInfo.hasVideo() +
+                    ",hasA:" + mTracksInfo.hasAudio());
+        }
+        if (mTracksInfo.videoReady() && mTracksInfo.audioReady()) {
+            mDemuxerCallbacks.onInitialized(mTracksInfo.hasAudio(), mTracksInfo.hasVideo());
+            mIsDemuxerInitDone = true;
+        }
+    }
+
+    public final class ComponentEventDispatcher {
+        public void onDataArrived() {
+            assertTrue(mMainHandler != null);
+            assertTrue(mComponentListener != null);
+            if (!mIsPlayerInitDone) {
+                return;
+            }
+            if (mMainHandler != null && mComponentListener != null) {
+                mMainHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        mComponentListener.onDataArrived();
+                    }
+                });
+            }
+        }
+
+        public void onVideoInputFormatChanged(final Format format) {
+            assertTrue(mMainHandler != null);
+            assertTrue(mComponentListener != null);
+            if (!mIsPlayerInitDone) {
+                return;
+            }
+            if (mMainHandler != null && mComponentListener != null) {
+                mMainHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        mComponentListener.onVideoInputFormatChanged(format);
+                    }
+                });
+            }
+        }
+
+        public void onAudioInputFormatChanged(final Format format) {
+            assertTrue(mMainHandler != null);
+            assertTrue(mComponentListener != null);
+            if (!mIsPlayerInitDone) {
+                return;
+            }
+            if (mMainHandler != null && mComponentListener != null) {
+                mMainHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        mComponentListener.onAudioInputFormatChanged(format);
+                    }
+                });
+            }
+        }
+    }
+
+    public final class ComponentListener {
+
+        // General purpose implementation
+        public void onDataArrived() {
+            assertTrue(mResourceCallbacks != null);
+            Log.d(LOGTAG, "[CB][onDataArrived]");
+            mResourceCallbacks.onDataArrived();
+        }
+
+        public void onVideoInputFormatChanged(Format format) {
+            assertTrue(mTracksInfo != null);
+            if (DEBUG) {
+                Log.d(LOGTAG, "[CB] onVideoInputFormatChanged [" + format + "]");
+                Log.d(LOGTAG, "[CB] SampleMIMEType [" +
+                              format.sampleMimeType + "], ContainerMIMEType [" +
+                              format.containerMimeType + "]");
+            }
+            mTracksInfo.onVideoInfoUpdated();
+            checkInitDone();
+        }
+
+        public void onAudioInputFormatChanged(Format format) {
+            assertTrue(mTracksInfo != null);
+            if (DEBUG) { Log.d(LOGTAG, "[CB] onAudioInputFormatChanged [" + format + "]"); }
+            mTracksInfo.onAudioInfoUpdated();
+            checkInitDone();
+        }
+    }
+
+    public DataSource.Factory buildDataSourceFactory(Context ctx, DefaultBandwidthMeter bandwidthMeter) {
+        return new DefaultDataSourceFactory(ctx, bandwidthMeter,
+                buildHttpDataSourceFactory(bandwidthMeter));
+    }
+
+    public HttpDataSource.Factory buildHttpDataSourceFactory(DefaultBandwidthMeter bandwidthMeter) {
+        return new DefaultHttpDataSourceFactory(AppConstants.USER_AGENT_FENNEC_MOBILE, bandwidthMeter);
+    }
+
+    private MediaSource buildMediaSource(Uri uri, String overrideExtension) {
+        if (DEBUG) { Log.d(LOGTAG, "buildMediaSource uri[" + uri + "]" + ", overridedExt[" + overrideExtension + "]"); }
+        int type = Util.inferContentType(TextUtils.isEmpty(overrideExtension)
+                                         ? uri.getLastPathSegment()
+                                         : "." + overrideExtension);
+        switch (type) {
+            case C.TYPE_HLS:
+                return new HlsMediaSource(uri, mMediaDataSourceFactory, mMainHandler, null);
+            default:
+                mResourceCallbacks.onError(ResourceError.UNSUPPORTED.code());
+                throw new IllegalArgumentException("Unsupported type: " + type);
+        }
+    }
+
+    GeckoHlsPlayer() {
+        if (DEBUG) { Log.d(LOGTAG, " construct"); }
+    }
+
+    void addResourceWrapperCallbackListener(ResourceCallbacks callback) {
+        if (DEBUG) { Log.d(LOGTAG, " addResourceWrapperCallbackListener ..."); }
+        mResourceCallbacks = callback;
+    }
+
+    void addDemuxerWrapperCallbackListener(DemuxerCallbacks callback) {
+        if (DEBUG) { Log.d(LOGTAG, " addDemuxerWrapperCallbackListener ..."); }
+        mDemuxerCallbacks = callback;
+    }
+
+    @Override
+    public void onLoadingChanged(boolean isLoading) {
+        if (DEBUG) { Log.d(LOGTAG, "loading [" + isLoading + "]"); }
+        if (!isLoading) {
+            // To update buffered position.
+            mComponentEventDispatcher.onDataArrived();
+        }
+    }
+
+    @Override
+    public void onPlayerStateChanged(boolean playWhenReady, int state) {
+        if (DEBUG) { Log.d(LOGTAG, "state [" + playWhenReady + ", " + getStateString(state) + "]"); }
+        if (state == ExoPlayer.STATE_READY) {
+            mPlayer.setPlayWhenReady(true);
+        }
+    }
+
+    @Override
+    public void onPositionDiscontinuity() {
+        if (DEBUG) { Log.d(LOGTAG, "positionDiscontinuity"); }
+    }
+
+    @Override
+    public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
+        if (DEBUG) {
+            Log.d(LOGTAG, "playbackParameters " +
+                  String.format("[speed=%.2f, pitch=%.2f]", playbackParameters.speed, playbackParameters.pitch));
+        }
+    }
+
+    @Override
+    public void onPlayerError(ExoPlaybackException e) {
+        if (DEBUG) { Log.e(LOGTAG, "playerFailed" , e); }
+        if (mResourceCallbacks != null) {
+            mResourceCallbacks.onError(ResourceError.PLAYER.code());
+        }
+        if (mDemuxerCallbacks != null) {
+            mDemuxerCallbacks.onError(DemuxerError.PLAYER.code());
+        }
+    }
+
+    @Override
+    public synchronized void onTracksChanged(TrackGroupArray ignored, TrackSelectionArray trackSelections) {
+        if (DEBUG) {
+            Log.d(LOGTAG, "onTracksChanged : TGA[" + ignored +
+                          "], TSA[" + trackSelections + "]");
+
+            MappedTrackInfo mappedTrackInfo = mTrackSelector.getCurrentMappedTrackInfo();
+            if (mappedTrackInfo == null) {
+              Log.d(LOGTAG, "Tracks []");
+              return;
+            }
+            Log.d(LOGTAG, "Tracks [");
+            // Log tracks associated to renderers.
+            for (int rendererIndex = 0; rendererIndex < mappedTrackInfo.length; rendererIndex++) {
+              TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
+              TrackSelection trackSelection = trackSelections.get(rendererIndex);
+              if (rendererTrackGroups.length > 0) {
+                Log.d(LOGTAG, "  Renderer:" + rendererIndex + " [");
+                for (int groupIndex = 0; groupIndex < rendererTrackGroups.length; groupIndex++) {
+                  TrackGroup trackGroup = rendererTrackGroups.get(groupIndex);
+                  String adaptiveSupport = getAdaptiveSupportString(trackGroup.length,
+                          mappedTrackInfo.getAdaptiveSupport(rendererIndex, groupIndex, false));
+                  Log.d(LOGTAG, "    Group:" + groupIndex + ", adaptive_supported=" + adaptiveSupport + " [");
+                  for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
+                    String status = getTrackStatusString(trackSelection, trackGroup, trackIndex);
+                    String formatSupport = getFormatSupportString(
+                            mappedTrackInfo.getTrackFormatSupport(rendererIndex, groupIndex, trackIndex));
+                    Log.d(LOGTAG, "      " + status + " Track:" + trackIndex +
+                                  ", " + Format.toLogString(trackGroup.getFormat(trackIndex)) +
+                                  ", supported=" + formatSupport);
+                  }
+                  Log.d(LOGTAG, "    ]");
+                }
+                Log.d(LOGTAG, "  ]");
+              }
+            }
+            // Log tracks not associated with a renderer.
+            TrackGroupArray unassociatedTrackGroups = mappedTrackInfo.getUnassociatedTrackGroups();
+            if (unassociatedTrackGroups.length > 0) {
+              Log.d(LOGTAG, "  Renderer:None [");
+              for (int groupIndex = 0; groupIndex < unassociatedTrackGroups.length; groupIndex++) {
+                Log.d(LOGTAG, "    Group:" + groupIndex + " [");
+                TrackGroup trackGroup = unassociatedTrackGroups.get(groupIndex);
+                for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
+                  String status = getTrackStatusString(false);
+                  String formatSupport = getFormatSupportString(
+                          RendererCapabilities.FORMAT_UNSUPPORTED_TYPE);
+                  Log.d(LOGTAG, "      " + status + " Track:" + trackIndex +
+                                ", " + Format.toLogString(trackGroup.getFormat(trackIndex)) +
+                                ", supported=" + formatSupport);
+                }
+                Log.d(LOGTAG, "    ]");
+              }
+              Log.d(LOGTAG, "  ]");
+            }
+            Log.d(LOGTAG, "]");
+        }
+        mTracksInfo = null;
+        int numVideoTracks = 0;
+        int numAudioTracks = 0;
+        for (int j = 0; j < ignored.length; j++) {
+            TrackGroup tg = ignored.get(j);
+            for (int i = 0; i < tg.length; i++) {
+                Format fmt = tg.getFormat(i);
+                if (fmt.sampleMimeType != null) {
+                    if (mRendererController.isVideoRendererEnabled() &&
+                        fmt.sampleMimeType.startsWith(new String("video"))) {
+                        numVideoTracks++;
+                    } else if (mRendererController.isAudioRendererEnabled() &&
+                               fmt.sampleMimeType.startsWith(new String("audio"))) {
+                        numAudioTracks++;
+                    }
+                }
+            }
+        }
+        mTracksInfo = new HlsMediaTracksInfo(numVideoTracks, numAudioTracks);
+    }
+
+    @Override
+    public void onTimelineChanged(Timeline timeline, Object manifest) {
+        // For now, we use the interface ExoPlayer.getDuration() for gecko,
+        // so here we create local variables 'window' & 'period' to obtain
+        // the dynamic duration.
+        // See. http://google.github.io/ExoPlayer/doc/reference/com/google/android/exoplayer2/Timeline.html
+        // for further information.
+        Timeline.Window window = new Timeline.Window();
+        mIsTimelineStatic = !timeline.isEmpty()
+                && !timeline.getWindow(timeline.getWindowCount() - 1, window).isDynamic;
+
+        int periodCount = timeline.getPeriodCount();
+        int windowCount = timeline.getWindowCount();
+        if (DEBUG) { Log.d(LOGTAG, "sourceInfo [periodCount=" + periodCount + ", windowCount=" + windowCount); }
+        Timeline.Period period = new Timeline.Period();
+        for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) {
+          timeline.getPeriod(i, period);
+          if (mDurationUs < period.getDurationUs()) {
+              mDurationUs = period.getDurationUs();
+          }
+        }
+        for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) {
+          timeline.getWindow(i, window);
+          if (mDurationUs < window.getDurationUs()) {
+              mDurationUs = window.getDurationUs();
+          }
+        }
+        // TODO : Need to check if the duration from player.getDuration() is different
+        // with the one calculated from multi-timelines/windows.
+        if (DEBUG) {
+            Log.d(LOGTAG, "Media duration (from Timeline) = " + mDurationUs +
+                          "(us)" + " player.getDuration() = " + mPlayer.getDuration() +
+                          "(ms)");
+        }
+    }
+
+    private static String getStateString(int state) {
+        switch (state) {
+            case ExoPlayer.STATE_BUFFERING:
+                return "B";
+            case ExoPlayer.STATE_ENDED:
+                return "E";
+            case ExoPlayer.STATE_IDLE:
+                return "I";
+            case ExoPlayer.STATE_READY:
+                return "R";
+            default:
+                return "?";
+        }
+    }
+
+    private static String getFormatSupportString(int formatSupport) {
+        switch (formatSupport) {
+          case RendererCapabilities.FORMAT_HANDLED:
+            return "YES";
+          case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES:
+            return "NO_EXCEEDS_CAPABILITIES";
+          case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE:
+            return "NO_UNSUPPORTED_TYPE";
+          case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE:
+            return "NO";
+          default:
+            return "?";
+        }
+      }
+
+    private static String getAdaptiveSupportString(int trackCount, int adaptiveSupport) {
+        if (trackCount < 2) {
+          return "N/A";
+        }
+        switch (adaptiveSupport) {
+          case RendererCapabilities.ADAPTIVE_SEAMLESS:
+            return "YES";
+          case RendererCapabilities.ADAPTIVE_NOT_SEAMLESS:
+            return "YES_NOT_SEAMLESS";
+          case RendererCapabilities.ADAPTIVE_NOT_SUPPORTED:
+            return "NO";
+          default:
+            return "?";
+        }
+      }
+
+      private static String getTrackStatusString(TrackSelection selection, TrackGroup group,
+                                                 int trackIndex) {
+        return getTrackStatusString(selection != null && selection.getTrackGroup() == group
+                && selection.indexOf(trackIndex) != C.INDEX_UNSET);
+      }
+
+      private static String getTrackStatusString(boolean enabled) {
+        return enabled ? "[X]" : "[ ]";
+      }
+
+    // =======================================================================
+    // API for GeckoHlsResourceWrapper
+    // =======================================================================
+    // Creates the ExoPlayer instance wired up with the customized
+    // audio/video renderers and starts preparing the media located at url.
+    // No-op when the player has already been initialized.
+    synchronized void init(String url) {
+        if (DEBUG) { Log.d(LOGTAG, " init"); }
+        assertTrue(mResourceCallbacks != null);
+        if (mIsPlayerInitDone) {
+            return;
+        }
+        Context ctx = GeckoAppShell.getApplicationContext();
+        mComponentListener = new ComponentListener();
+        mComponentEventDispatcher = new ComponentEventDispatcher();
+        // NOTE(review): Handler() binds to the calling thread's looper —
+        // confirm init() is always invoked on a thread that has one.
+        mMainHandler = new Handler();
+
+        mDurationUs = 0;
+
+        // Prepare trackSelector
+        TrackSelection.Factory videoTrackSelectionFactory =
+                new AdaptiveTrackSelection.Factory(BANDWIDTH_METER);
+        mTrackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
+
+        // Prepare customized renderers; index 0 is video, index 1 is audio.
+        mRenderers = new GeckoHlsRendererBase[2];
+        mVRenderer = new GeckoHlsVideoRenderer(mComponentEventDispatcher);
+        mARenderer = new GeckoHlsAudioRenderer(mComponentEventDispatcher);
+        mRenderers[0] = mVRenderer;
+        mRenderers[1] = mARenderer;
+
+        // Create ExoPlayer instance with specific components.
+        mPlayer = ExoPlayerFactory.newInstance(mRenderers, mTrackSelector);
+        mPlayer.addListener(this);
+
+        Uri uri = Uri.parse(url);
+        mMediaDataSourceFactory = buildDataSourceFactory(ctx, BANDWIDTH_METER);
+        mMediaSource = buildMediaSource(uri, null);
+
+        mPlayer.prepare(mMediaSource);
+        mIsPlayerInitDone = true;
+    }
+
+    // A stream whose timeline is not static (still growing/changing) is
+    // treated as live; VOD content has a static timeline.
+    public boolean isLiveStream() {
+        return !mIsTimelineStatic;
+    }
+
+    // =======================================================================
+    // API for GeckoHlsDemuxerWrapper
+    // =======================================================================
+    // Hands out up to |number| queued demuxed video samples; returns an
+    // empty queue when no video renderer exists.
+    public ConcurrentLinkedQueue<GeckoHlsSample> getVideoSamples(int number) {
+        return mVRenderer != null ? mVRenderer.getQueuedSamples(number) :
+                                    new ConcurrentLinkedQueue<GeckoHlsSample>();
+    }
+
+    // Hands out up to |number| queued demuxed audio samples; returns an
+    // empty queue when no audio renderer exists.
+    public ConcurrentLinkedQueue<GeckoHlsSample> getAudioSamples(int number) {
+        return mARenderer != null ? mARenderer.getQueuedSamples(number) :
+                                    new ConcurrentLinkedQueue<GeckoHlsSample>();
+    }
+
+    // Returns the media duration in microseconds.
+    public long getDuration() {
+        assertTrue(mPlayer != null);
+        // Value returned by getDuration() is in milliseconds.
+        // NOTE(review): ExoPlayer reports C.TIME_UNSET when the duration is
+        // unknown (e.g. live streams); multiplying that sentinel by 1000
+        // overflows — confirm callers never reach here for live content.
+        long duration = mPlayer.getDuration() * 1000;
+        if (DEBUG) { Log.d(LOGTAG, "getDuration : " + duration  + "(Us)"); }
+        return duration;
+    }
+
+    // Returns the buffered position in microseconds.
+    public long getBufferedPosition() {
+        assertTrue(mPlayer != null);
+        // Value returned by getBufferedPosition() is in milliseconds.
+        long bufferedPos = mPlayer.getBufferedPosition() * 1000;
+        if (DEBUG) { Log.d(LOGTAG, "getBufferedPosition : " + bufferedPos + "(Us)"); }
+        return bufferedPos;
+    }
+
+    // Returns the number of demuxed tracks of the given type; 0 for any
+    // type other than audio or video.
+    public synchronized int getNumberOfTracks(TrackType trackType) {
+        if (DEBUG) { Log.d(LOGTAG, "getNumberOfTracks"); }
+        assertTrue(mTracksInfo != null);
+
+        if (trackType == TrackType.VIDEO) {
+            return mTracksInfo.getNumOfVideoTracks();
+        } else if (trackType == TrackType.AUDIO) {
+            return mTracksInfo.getNumOfAudioTracks();
+        }
+        return 0;
+    }
+
+    // Returns the Format of the index-th video track, or null when the
+    // stream carries no video.
+    public Format getVideoTrackFormat(int index) {
+        if (DEBUG) { Log.d(LOGTAG, "getVideoTrackFormat"); }
+        assertTrue(mVRenderer != null);
+        assertTrue(mTracksInfo != null);
+        return mTracksInfo.hasVideo() ? mVRenderer.getFormat(index) : null;
+    }
+
+    // Returns the Format of the index-th audio track, or null when the
+    // stream carries no audio.
+    public Format getAudioTrackFormat(int index) {
+        if (DEBUG) { Log.d(LOGTAG, "getAudioTrackFormat"); }
+        assertTrue(mARenderer != null);
+        assertTrue(mTracksInfo != null);
+        return mTracksInfo.hasAudio() ? mARenderer.getFormat(index) : null;
+    }
+
+    public boolean seek(long positionUs) {
+        // positionUs : microseconds.
+        // NOTE : 1) It's not possible to seek media by tracktype via ExoPlayer Interface.
+        //        2) positionUs is samples PTS from MFR, we need to re-adjust it
+        //           for ExoPlayer by subtracting sample start time.
+        //        3) Time unit for ExoPlayer.seek() is milliseconds.
+        try {
+            // TODO : Gather Timeline Period / Window information to develop
+            //        complete timeline, and seekTime should be inside the duration.
+            Long startTime = Long.MAX_VALUE;
+            for (GeckoHlsRendererBase r : mRenderers) {
+                if (r == mVRenderer && mRendererController.isVideoRendererEnabled() ||
+                    r == mARenderer && mRendererController.isAudioRendererEnabled()) {
+                // Find the min value of the start time
+                    startTime = Math.min(startTime, r.getFirstSamplePTS());
+                }
+            }
+            if (DEBUG) {
+                Log.d(LOGTAG, "seeking  : " + positionUs / 1000 +
+                              " (ms); startTime : " + startTime / 1000 + " (ms)");
+            }
+            assertTrue(startTime != Long.MAX_VALUE);
+            mPlayer.seekTo(positionUs / 1000 - startTime / 1000);
+        } catch (Exception e) {
+            mDemuxerCallbacks.onError(DemuxerError.UNKNOWN.code());
+            return false;
+        }
+        return true;
+    }
+
+    // Returns the PTS of the next keyframe queued in the video renderer,
+    // or Long.MAX_VALUE when there is no video renderer.
+    public long getNextKeyFrameTime() {
+        if (mVRenderer == null) {
+            return Long.MAX_VALUE;
+        }
+        return mVRenderer.getNextKeyFrameTime();
+    }
+
+    // Tears down the player, drops the renderers and clears all callbacks.
+    // After this the player must be re-init()ed before further use.
+    public void release() {
+        if (DEBUG) { Log.d(LOGTAG, "releasing  ..."); }
+        if (mPlayer != null) {
+            mPlayer.removeListener(this);
+            mPlayer.stop();
+            mPlayer.release();
+            mVRenderer = null;
+            mARenderer = null;
+            mPlayer = null;
+        }
+        // NOTE(review): mTrackSelector / mMediaSource are not cleared here —
+        // confirm that is intentional.
+        mDemuxerCallbacks = null;
+        mResourceCallbacks = null;
+        mIsPlayerInitDone = false;
+        mIsDemuxerInitDone = false;
+    }
+}
\ No newline at end of file
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsRendererBase.java
@@ -0,0 +1,300 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.util.Log;
+
+import com.google.android.exoplayer2.BaseRenderer;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.FormatHolder;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.Iterator;
+
+/*
+ * Base class for the customized HLS renderers. These renderers do not
+ * decode: they pull demuxed samples out of ExoPlayer's HlsMediaSource,
+ * wrap them as GeckoHlsSamples and queue them for GeckoHlsDemuxerWrapper
+ * to fetch.
+ */
+public abstract class GeckoHlsRendererBase extends BaseRenderer {
+    // Stop feeding more input once roughly one second of samples is queued.
+    protected static final int QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD = 1000000; //1sec
+    protected final FormatHolder mFormatHolder = new FormatHolder();
+    /*
+     *  DEBUG/LOGTAG will be set in the 2 subclasses GeckoHlsAudioRenderer and
+     *  GeckoHlsVideoRenderer, and we still want to log messages in the base
+     *  class GeckoHlsRendererBase, so neither 'static' nor 'final' are
+     *  applied to them.
+     */
+    protected boolean DEBUG;
+    protected String LOGTAG;
+    // Notify GeckoHlsPlayer about renderer's status, i.e. data has arrived.
+    protected GeckoHlsPlayer.ComponentEventDispatcher mPlayerEventDispatcher;
+
+    // Demuxed samples waiting to be fetched via getQueuedSamples().
+    protected ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedInputSamples = new ConcurrentLinkedQueue<>();
+
+    // Scratch buffer handed to readSource(); (re)created by the subclass.
+    protected ByteBuffer mInputBuffer = null;
+    // All input formats seen so far, newest last; samples index into this list.
+    protected ArrayList<Format> mFormats = new ArrayList<Format>();
+    protected boolean mInitialized = false;
+    // Set when the consumer drained the queue; cleared once data arrives again.
+    protected boolean mWaitingForData = true;
+    protected boolean mInputStreamEnded = false;
+    // PTS of the first sample handed out; Long.MIN_VALUE until known.
+    protected long mFirstSampleStartTime = Long.MIN_VALUE;
+
+    // Template methods implemented by the audio/video subclasses.
+    protected abstract void createInputBuffer();
+    protected abstract void handleReconfiguration(DecoderInputBuffer bufferForRead);
+    protected abstract void handleFormatRead(DecoderInputBuffer bufferForRead);
+    protected abstract void handleEndOfStream(DecoderInputBuffer bufferForRead);
+    protected abstract void handleSamplePreparation(DecoderInputBuffer bufferForRead);
+    protected abstract void resetRenderer();
+    protected abstract boolean clearInputSamplesQueue();
+    protected abstract void notifyPlayerInputFormatChanged(Format newFormat);
+
+    // NOTE(review): only enforced when the subclass enables DEBUG; this is
+    // a no-op in release builds.
+    protected void assertTrue(boolean condition) {
+        if (DEBUG && !condition) {
+            throw new AssertionError("Expected condition to be true");
+        }
+    }
+
+    public GeckoHlsRendererBase(int trackType, GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
+        super(trackType);
+        mPlayerEventDispatcher = eventDispatcher;
+    }
+
+    // True once the queued samples span more than the duration threshold
+    // (distance between the first and last PTS currently in the queue).
+    private boolean isQueuedEnoughData() {
+        if (mDemuxedInputSamples.isEmpty()) {
+            return false;
+        }
+
+        Iterator<GeckoHlsSample> iter = mDemuxedInputSamples.iterator();
+        long firstPTS = 0;
+        if (iter.hasNext()) {
+            GeckoHlsSample sample = iter.next();
+            firstPTS = sample.info.presentationTimeUs;
+        }
+        long lastPTS = firstPTS;
+        // Walk to the tail of the queue to find the last PTS.
+        while (iter.hasNext()) {
+            GeckoHlsSample sample = iter.next();
+            lastPTS = sample.info.presentationTimeUs;
+        }
+        return Math.abs(lastPTS - firstPTS) > QUEUED_INPUT_SAMPLE_DURATION_THRESHOLD;
+    }
+
+    // Returns the index-th format seen on this track, or null when index is
+    // out of range.
+    public Format getFormat(int index) {
+        assertTrue(index >= 0);
+        Format fmt = index < mFormats.size() ? mFormats.get(index) : null;
+        if (DEBUG) { Log.d(LOGTAG, "getFormat : index = " + index + ", format : " + fmt); }
+        return fmt;
+    }
+
+    public long getFirstSamplePTS() { return mFirstSampleStartTime; }
+
+    // Transfers up to |number| samples out of the demuxed queue. An empty
+    // result flips mWaitingForData so the player gets notified when data
+    // arrives again; the first non-empty result records the stream's start PTS.
+    public synchronized ConcurrentLinkedQueue<GeckoHlsSample> getQueuedSamples(int number) {
+        ConcurrentLinkedQueue<GeckoHlsSample> samples =
+            new ConcurrentLinkedQueue<GeckoHlsSample>();
+
+        int queuedSize = mDemuxedInputSamples.size();
+        for (int i = 0; i < queuedSize; i++) {
+            if (i >= number) {
+                break;
+            }
+            GeckoHlsSample sample = mDemuxedInputSamples.poll();
+            samples.offer(sample);
+        }
+        if (samples.isEmpty()) {
+            if (DEBUG) { Log.d(LOGTAG, "getQueuedSamples isEmpty, mWaitingForData = true !"); }
+            mWaitingForData = true;
+        } else if (mFirstSampleStartTime == Long.MIN_VALUE) {
+            mFirstSampleStartTime = samples.peek().info.presentationTimeUs;
+            if (DEBUG) { Log.d(LOGTAG, "mFirstSampleStartTime = " + mFirstSampleStartTime); }
+        }
+        return samples;
+    }
+
+    // Placeholder for reacting to DRM init-data changes between formats;
+    // both branches are intentionally empty until MFR notification lands.
+    protected void handleDrmInitChanged(Format oldFormat, Format newFormat) {
+        Object oldDrmInit = oldFormat == null ? null : oldFormat.drmInitData;
+        Object newDrnInit = newFormat.drmInitData;
+
+        // TODO: Notify MFR if the content is encrypted or not.
+        if (newDrnInit != oldDrmInit) {
+            if (newDrnInit != null) {
+            } else {
+            }
+        }
+    }
+
+    protected boolean canReconfigure(Format oldFormat, Format newFormat) {
+        // Referring to ExoPlayer's MediaCodecBaseRenderer, the default is set
+        // to false. Only override it in video renderer subclass.
+        return false;
+    }
+
+    protected void prepareReconfiguration() {
+        // Referring to ExoPlayer's MediaCodec related renderers, only video
+        // renderer handles this.
+    }
+
+    protected void updateCSDInfo(Format format) {
+        // do nothing.
+    }
+
+    // Records the new format, handles DRM changes, and either reconfigures
+    // the renderer in place or fully re-initializes it for the new format.
+    protected void onInputFormatChanged(Format newFormat) {
+        Format oldFormat;
+        try {
+            oldFormat = mFormats.get(mFormats.size() - 1);
+        } catch (IndexOutOfBoundsException e) {
+            oldFormat = null;
+        }
+        if (DEBUG) {
+            Log.d(LOGTAG, "[onInputFormatChanged] old : " + oldFormat +
+                  " => new : " + newFormat);
+        }
+        mFormats.add(newFormat);
+        handleDrmInitChanged(oldFormat, newFormat);
+
+        if (mInitialized && canReconfigure(oldFormat, newFormat)) {
+            prepareReconfiguration();
+        } else {
+            resetRenderer();
+            maybeInitRenderer();
+        }
+
+        updateCSDInfo(newFormat);
+        notifyPlayerInputFormatChanged(newFormat);
+    }
+
+    // Lazily creates the input buffer once at least one format is known.
+    protected void maybeInitRenderer() {
+        if (mInitialized || mFormats.size() == 0) {
+            return;
+        }
+        if (DEBUG) { Log.d(LOGTAG, "Initializing ... "); }
+        createInputBuffer();
+        mInitialized = true;
+    }
+
+    /*
+     * The place we get demuxed data from HlsMediaSource(ExoPlayer).
+     * The data will then be converted to GeckoHlsSample and delivered to
+     * GeckoHlsDemuxerWrapper for further use.
+     * If the return value is true, that means a GeckoHlsSample is queued
+     * successfully. We can try to feed more samples into queue.
+     * If the return value is false, that means we might encounter following
+     * situation 1) not initialized 2) input stream is ended 3) queue is full.
+     * 4) format changed. 5) exception happened.
+     */
+    protected synchronized boolean feedInputBuffersQueue() {
+        if (!mInitialized || mInputStreamEnded || isQueuedEnoughData()) {
+            // Need to reinitialize the renderer or the input stream has ended
+            // or we just reached the maximum queue size.
+            return false;
+        }
+
+        DecoderInputBuffer bufferForRead =
+            new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
+        bufferForRead.data = mInputBuffer;
+        if (bufferForRead.data != null) {
+            bufferForRead.clear();
+        }
+
+        handleReconfiguration(bufferForRead);
+
+        // Read data from HlsMediaSource
+        int result = C.RESULT_NOTHING_READ;
+        try {
+            result = readSource(mFormatHolder, bufferForRead, false);
+        } catch (Exception e) {
+            Log.e(LOGTAG, "[feedInput] Exception when readSource :", e);
+            return false;
+        }
+
+        if (result == C.RESULT_NOTHING_READ) {
+            return false;
+        }
+
+        if (result == C.RESULT_FORMAT_READ) {
+            handleFormatRead(bufferForRead);
+            return true;
+        }
+
+        // We've read a buffer.
+        if (bufferForRead.isEndOfStream()) {
+            if (DEBUG) { Log.d(LOGTAG, "Now we're at the End Of Stream."); }
+            handleEndOfStream(bufferForRead);
+            return false;
+        }
+
+        bufferForRead.flip();
+
+        handleSamplePreparation(bufferForRead);
+
+        maybeNotifyDataArrived();
+        return true;
+    }
+
+    // Fires onDataArrived exactly once per dry spell, once enough data has
+    // been queued again.
+    private void maybeNotifyDataArrived() {
+        if (mWaitingForData && isQueuedEnoughData()) {
+            if (DEBUG) { Log.d(LOGTAG, "onDataArrived"); }
+            mPlayerEventDispatcher.onDataArrived();
+            mWaitingForData = false;
+        }
+    }
+
+    // Reads only the format (no sample data) from the source.
+    private void readFormat() {
+        int result = readSource(mFormatHolder, null, true);
+        if (result == C.RESULT_FORMAT_READ) {
+            onInputFormatChanged(mFormatHolder.format);
+        }
+    }
+
+    @Override
+    protected void onEnabled(boolean joining) {
+        // Do nothing.
+    }
+
+    @Override
+    protected void onDisabled() {
+        mFormats.clear();
+        resetRenderer();
+    }
+
+    @Override
+    public boolean isReady() {
+        // Ready as soon as at least one input format has been read.
+        return mFormats.size() != 0;
+    }
+
+    @Override
+    public boolean isEnded() {
+        return mInputStreamEnded;
+    }
+
+    @Override
+    protected synchronized void onPositionReset(long positionUs, boolean joining) {
+        if (DEBUG) { Log.d(LOGTAG, "onPositionReset : positionUs = " + positionUs); }
+        mInputStreamEnded = false;
+        if (mInitialized) {
+            clearInputSamplesQueue();
+        }
+    }
+
+    /*
+     * This is called by ExoPlayerImplInternal.java.
+     * ExoPlayer checks the status of renderer, i.e. isReady() / isEnded(), and
+     * calls renderer.render by passing its wall clock time.
+     */
+    @Override
+    public void render(long positionUs, long elapsedRealtimeUs) {
+        if (DEBUG) {
+            Log.d(LOGTAG, "positionUs = " + positionUs +
+                          ", mInputStreamEnded = " + mInputStreamEnded);
+        }
+        if (mInputStreamEnded) {
+            return;
+        }
+        if (mFormats.size() == 0) {
+            readFormat();
+        }
+
+        maybeInitRenderer();
+        // Drain the source until nothing more can be queued right now.
+        while (feedInputBuffersQueue()) {
+            // Do nothing
+        }
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/GeckoHlsVideoRenderer.java
@@ -0,0 +1,463 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodec.CryptoInfo;
+import android.os.Handler;
+import android.util.Log;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
+import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
+import com.google.android.exoplayer2.RendererCapabilities;
+import com.google.android.exoplayer2.util.MimeTypes;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+import org.mozilla.gecko.AppConstants.Versions;
+
+public class GeckoHlsVideoRenderer extends GeckoHlsRendererBase {
+    /*
+     * By configuring these states, initialization data is provided for
+     * ExoPlayer's HlsMediaSource to parse HLS bitstream and then provide samples
+     * starting with an Access Unit Delimiter including SPS/PPS for TS,
+     * and provide samples starting with an AUD without SPS/PPS for FMP4.
+     */
+    private enum RECONFIGURATION_STATE {
+        NONE,           // No reconfiguration data pending.
+        WRITE_PENDING,  // Init data must be written into the next input buffer.
+        QUEUE_PENDING   // Init data was written but not yet consumed.
+    }
+    // Whether a reconfiguration (rather than a full reset) is in flight.
+    private boolean mRendererReconfigured;
+    private RECONFIGURATION_STATE mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+
+    // A list of the formats which may be included in the bitstream.
+    private Format[] mStreamFormats;
+    // The max width/height/inputBufferSize for specific codec format.
+    private CodecMaxValues mCodecMaxValues;
+    // A temporary queue for samples whose duration is not calculated yet.
+    private ConcurrentLinkedQueue<GeckoHlsSample> mDemuxedNoDurationSamples =
+        new ConcurrentLinkedQueue<>();
+
+    // Contain CSD-0(SPS)/CSD-1(PPS) information (in AnnexB format) for
+    // prepending each keyframe. When video format changes, this information
+    // changes accordingly.
+    private byte[] mCSDInfo = null;
+
+    public GeckoHlsVideoRenderer(GeckoHlsPlayer.ComponentEventDispatcher eventDispatcher) {
+        super(C.TRACK_TYPE_VIDEO, eventDispatcher);
+        // The CryptoInfo/adaptive playback APIs used here need API 16+.
+        assertTrue(Versions.feature16Plus);
+        LOGTAG = getClass().getSimpleName();
+        DEBUG = false;
+    }
+
+    @Override
+    public final int supportsMixedMimeTypeAdaptation() {
+        return ADAPTIVE_NOT_SEAMLESS;
+    }
+
+    @Override
+    public final int supportsFormat(Format format) {
+        /*
+         * FORMAT_EXCEEDS_CAPABILITIES : The Renderer is capable of rendering
+         *                               formats with the same mime type, but
+         *                               the properties of the format exceed
+         *                               the renderer's capability.
+         * FORMAT_UNSUPPORTED_SUBTYPE : The Renderer is a general purpose
+         *                              renderer for formats of the same
+         *                              top-level type, but is not capable of
+         *                              rendering the format or any other format
+         *                              with the same mime type because the
+         *                              sub-type is not supported.
+         * FORMAT_UNSUPPORTED_TYPE : The Renderer is not capable of rendering
+         *                           the format, either because it does not support
+         *                           the format's top-level type, or because it's
+         *                           a specialized renderer for a different mime type.
+         * ADAPTIVE_NOT_SEAMLESS : The Renderer can adapt between formats,
+         *                         but may suffer a brief discontinuity (~50-100ms)
+         *                         when adaptation occurs.
+         * ADAPTIVE_SEAMLESS : The Renderer can seamlessly adapt between formats.
+         */
+        final String mimeType = format.sampleMimeType;
+        if (!MimeTypes.isVideo(mimeType)) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
+        }
+
+        MediaCodecInfo decoderInfo = null;
+        try {
+            MediaCodecSelector mediaCodecSelector = MediaCodecSelector.DEFAULT;
+            decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
+        } catch (MediaCodecUtil.DecoderQueryException e) {
+            Log.e(LOGTAG, e.getMessage());
+        }
+        if (decoderInfo == null) {
+            return RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE;
+        }
+
+        boolean decoderCapable = decoderInfo.isCodecSupported(format.codecs);
+        if (decoderCapable && format.width > 0 && format.height > 0) {
+            if (Versions.preLollipop) {
+                try {
+                    decoderCapable = format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
+                } catch (MediaCodecUtil.DecoderQueryException e) {
+                    Log.e(LOGTAG, e.getMessage());
+                }
+                if (!decoderCapable) {
+                    if (DEBUG) {
+                        Log.d(LOGTAG, "Check [legacyFrameSize, " +
+                                      format.width + "x" + format.height + "]");
+                    }
+                }
+            } else {
+                decoderCapable =
+                    decoderInfo.isVideoSizeAndRateSupportedV21(format.width,
+                                                               format.height,
+                                                               format.frameRate);
+            }
+        }
+
+        int adaptiveSupport = decoderInfo.adaptive ?
+            RendererCapabilities.ADAPTIVE_SEAMLESS :
+            RendererCapabilities.ADAPTIVE_NOT_SEAMLESS;
+        int formatSupport = decoderCapable ?
+            RendererCapabilities.FORMAT_HANDLED :
+            RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES;
+        return adaptiveSupport | formatSupport;
+    }
+
+    @Override
+    protected final void createInputBuffer() {
+        assertTrue(mFormats.size() > 0);
+        // Calculate maximum size which might be used for target format.
+        Format currentFormat = mFormats.get(mFormats.size() - 1);
+        mCodecMaxValues = getCodecMaxValues(currentFormat, mStreamFormats);
+        // Create a buffer with maximal size for reading source.
+        // Note : Though we are able to dynamically enlarge buffer size by
+        // creating DecoderInputBuffer with specific BufferReplacementMode, we
+        // still allocate a calculated max size buffer for it at first to reduce
+        // runtime overhead.
+        mInputBuffer = ByteBuffer.wrap(new byte[mCodecMaxValues.inputSize]);
+    }
+
+    @Override
+    protected void resetRenderer() {
+        if (DEBUG) { Log.d(LOGTAG, "[resetRenderer] mInitialized = " + mInitialized); }
+        if (mInitialized) {
+            mRendererReconfigured = false;
+            mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+            mInputBuffer = null;
+            mCSDInfo = null;
+            mInitialized = false;
+        }
+    }
+
+    @Override
+    protected void handleReconfiguration(DecoderInputBuffer bufferForRead) {
+        // For adaptive reconfiguration OMX decoders expect all reconfiguration
+        // data to be supplied at the start of the buffer that also contains
+        // the first frame in the new format.
+        assertTrue(mFormats.size() > 0);
+        if (mRendererReconfigurationState == RECONFIGURATION_STATE.WRITE_PENDING) {
+            if (DEBUG) { Log.d(LOGTAG, "[feedInput][WRITE_PENDING] put initialization data"); }
+            Format currentFormat = mFormats.get(mFormats.size() - 1);
+            for (int i = 0; i < currentFormat.initializationData.size(); i++) {
+                byte[] data = currentFormat.initializationData.get(i);
+                bufferForRead.data.put(data);
+            }
+            mRendererReconfigurationState = RECONFIGURATION_STATE.QUEUE_PENDING;
+        }
+    }
+
+    @Override
+    protected void handleFormatRead(DecoderInputBuffer bufferForRead) {
+        if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
+            if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] 2 formats in a row."); }
+            // We received two formats in a row. Clear the current buffer of any reconfiguration data
+            // associated with the first format.
+            bufferForRead.clear();
+            mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+        }
+        onInputFormatChanged(mFormatHolder.format);
+    }
+
+    @Override
+    protected void handleEndOfStream(DecoderInputBuffer bufferForRead) {
+        if (mRendererReconfigurationState == RECONFIGURATION_STATE.QUEUE_PENDING) {
+            if (DEBUG) { Log.d(LOGTAG, "[feedInput][QUEUE_PENDING] isEndOfStream."); }
+            // We received a new format immediately before the end of the stream. We need to clear
+            // the corresponding reconfiguration data from the current buffer, but re-write it into
+            // a subsequent buffer if there are any (e.g. if the user seeks backwards).
+            bufferForRead.clear();
+            mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+        }
+        mInputStreamEnded = true;
+        GeckoHlsSample sample = GeckoHlsSample.EOS;
+        calculatDuration(sample);
+    }
+
+    @Override
+    protected void handleSamplePreparation(DecoderInputBuffer bufferForRead) {
+        int csdInfoSize = mCSDInfo != null ? mCSDInfo.length : 0;
+        int dataSize = bufferForRead.data.limit();
+        int size = bufferForRead.isKeyFrame() ? csdInfoSize + dataSize : dataSize;
+        byte[] realData = new byte[size];
+        if (bufferForRead.isKeyFrame()) {
+            // Prepend the CSD information to the sample if it's a key frame.
+            System.arraycopy(mCSDInfo, 0, realData, 0, csdInfoSize);
+            bufferForRead.data.get(realData, csdInfoSize, dataSize);
+        } else {
+            bufferForRead.data.get(realData, 0, dataSize);
+        }
+        ByteBuffer buffer = ByteBuffer.wrap(realData);
+        mInputBuffer = bufferForRead.data;
+        mInputBuffer.clear();
+
+        CryptoInfo cryptoInfo = bufferForRead.isEncrypted() ? bufferForRead.cryptoInfo.getFrameworkCryptoInfoV16() : null;
+        BufferInfo bufferInfo = new BufferInfo();
+        // Flags in DecoderInputBuffer are synced with MediaCodec Buffer flags.
+        int flags = 0;
+        flags |= bufferForRead.isKeyFrame() ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
+        flags |= bufferForRead.isEndOfStream() ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+        bufferInfo.set(0, size, bufferForRead.timeUs, flags);
+
+        assertTrue(mFormats.size() > 0);
+        // We add a new format in the list once format changes, so the formatIndex
+        // should indicate to the last(latest) format.
+        GeckoHlsSample sample = GeckoHlsSample.create(buffer,
+                                                      bufferInfo,
+                                                      cryptoInfo,
+                                                      mFormats.size() - 1);
+
+        // There's no duration information from the ExoPlayer's sample, we need
+        // to calculate it.
+        calculatDuration(sample);
+        mRendererReconfigurationState = RECONFIGURATION_STATE.NONE;
+    }
+
+    @Override
+    protected void onPositionReset(long positionUs, boolean joining) {
+        super.onPositionReset(positionUs, joining);
+        if (mInitialized && mRendererReconfigured && mFormats.size() != 0) {
+            if (DEBUG) { Log.d(LOGTAG, "[onPositionReset] WRITE_PENDING"); }
+            // Any reconfiguration data that we put shortly before the reset
+            // may be invalid. We avoid this issue by sending reconfiguration
+            // data following every position reset.
+            mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+        }
+    }
+
+    @Override
+    protected boolean clearInputSamplesQueue() {
+        if (DEBUG) { Log.d(LOGTAG, "clearInputSamplesQueue"); }
+        mDemuxedInputSamples.clear();
+        mDemuxedNoDurationSamples.clear();
+        return true;
+    }
+
+    @Override
+    protected boolean canReconfigure(Format oldFormat, Format newFormat) {
+        boolean canReconfig = areAdaptationCompatible(oldFormat, newFormat)
+          && newFormat.width <= mCodecMaxValues.width && newFormat.height <= mCodecMaxValues.height
+          && newFormat.maxInputSize <= mCodecMaxValues.inputSize;
+        if (DEBUG) { Log.d(LOGTAG, "[canReconfigure] : " + canReconfig); }
+        return canReconfig;
+    }
+
+    @Override
+    protected void prepareReconfiguration() {
+        if (DEBUG) { Log.d(LOGTAG, "[onInputFormatChanged] starting reconfiguration !"); }
+        mRendererReconfigured = true;
+        mRendererReconfigurationState = RECONFIGURATION_STATE.WRITE_PENDING;
+    }
+
+    @Override
+    protected void updateCSDInfo(Format format) {
+        int size = 0;
+        for (int i = 0; i < format.initializationData.size(); i++) {
+            size += format.initializationData.get(i).length;
+        }
+        int startPos = 0;
+        mCSDInfo = new byte[size];
+        for (int i = 0; i < format.initializationData.size(); i++) {
+            byte[] data = format.initializationData.get(i);
+            System.arraycopy(data, 0, mCSDInfo, startPos, data.length);
+            startPos += data.length;
+        }
+        if (DEBUG) { Log.d(LOGTAG, "mCSDInfo [" + Utils.bytesToHex(mCSDInfo) + "]"); }
+    }
+
+    @Override
+    protected void notifyPlayerInputFormatChanged(Format newFormat) {
+        mPlayerEventDispatcher.onVideoInputFormatChanged(newFormat);
+    }
+
+    // For each of the first |range| samples, sets its duration to the gap to
+    // the closest later PTS found within a sliding window of neighbours
+    // (offsets -2 .. +13, i.e. 16 samples) — needed because PTS order is not
+    // decode order for B-frame content.
+    private void calculateSamplesWithin(GeckoHlsSample[] samples, int range) {
+        // Calculate the first 'range' elements.
+        for (int i = 0; i < range; i++) {
+            // Comparing among samples in the window.
+            for (int j = -2; j < 14; j++) {
+                if (i + j >= 0 &&
+                    i + j < range &&
+                    samples[i + j].info.presentationTimeUs > samples[i].info.presentationTimeUs) {
+                    samples[i].duration =
+                        Math.min(samples[i].duration,
+                                 samples[i + j].info.presentationTimeUs - samples[i].info.presentationTimeUs);
+                }
+            }
+        }
+    }
+
+    /*
+     * Derive per-sample durations for demuxed samples.
+     *
+     * NOTE :
+     * Since we customized the renderer to act as a demuxer, we are not able
+     * to obtain a duration from the DecoderInputBuffer (it carries none), so
+     * we calculate it by referring to nearby samples' timestamps.
+     * A temporary queue |mDemuxedNoDurationSamples| holds demuxed samples
+     * from HlsMediaSource which have no duration information at first.
+     * We choose 16 as the comparison window size because it is commonly used
+     * as a GOP size.
+     * Assuming there are already 16 demuxed samples in the _no duration_
+     * queue, e.g. |-2|-1|0|1|2|3|4|5|6|...|13|
+     * once a new demuxed (no duration) sample X (the 17th) is put into the
+     * temporary queue,
+     * e.g. |-2|-1|0|1|2|3|4|5|6|...|13|X|
+     * we are able to calculate the correct duration for sample 0 by finding
+     * the closest PTS greater than sample 0's among these 16 samples
+     * (here, samples -2 to 13).
+     *
+     * (Name keeps its historical spelling — TODO: rename to
+     * calculateDuration together with its call sites.)
+     */
+    private void calculatDuration(GeckoHlsSample inputSample) {
+        if (inputSample != null) {
+            mDemuxedNoDurationSamples.offer(inputSample);
+        }
+        int sizeOfNoDura = mDemuxedNoDurationSamples.size();
+        // A calculation window we've ever found suitable for both HLS TS & FMP4.
+        int range = sizeOfNoDura >= 17 ? 17 : sizeOfNoDura;
+        GeckoHlsSample[] inputArray =
+            mDemuxedNoDurationSamples.toArray(new GeckoHlsSample[sizeOfNoDura]);
+        if (range >= 17 && !mInputStreamEnded) {
+            // Window is full: the head sample's duration is now final, so move
+            // it to the queue the consumer drains.
+            calculateSamplesWithin(inputArray, range);
+
+            GeckoHlsSample toQueue = mDemuxedNoDurationSamples.poll();
+            mDemuxedInputSamples.offer(toQueue);
+            if (DEBUG) {
+                Log.d(LOGTAG, "Demuxed sample PTS : " +
+                              toQueue.info.presentationTimeUs + ", duration :" +
+                              toQueue.duration + ", isKeyFrame(" +
+                              toQueue.isKeyFrame() + ", formatIndex(" +
+                              toQueue.formatIndex + "), queue size : " +
+                              mDemuxedInputSamples.size() + ", NoDuQueue size : " +
+                              mDemuxedNoDurationSamples.size());
+            }
+        } else if (mInputStreamEnded) {
+            // End of stream: flush everything that is still pending.
+            calculateSamplesWithin(inputArray, sizeOfNoDura);
+
+            // NOTE : We're not able to calculate the duration for the last sample.
+            //        A workaround here is to assign a close duration to it.
+            long prevDuration = 33333; // ~1 frame at 30fps, in microseconds.
+            GeckoHlsSample sample = null;
+            for (sample = mDemuxedNoDurationSamples.poll(); sample != null; sample = mDemuxedNoDurationSamples.poll()) {
+                if (sample.duration == Long.MAX_VALUE) {
+                    sample.duration = prevDuration;
+                    // Bug fix: this log used to say "PTS" but it is the
+                    // duration that gets adjusted here.
+                    if (DEBUG) { Log.d(LOGTAG, "Adjust the duration of the last sample to " + sample.duration + " (us)"); }
+                }
+                prevDuration = sample.duration;
+                if (DEBUG) {
+                    Log.d(LOGTAG, "last loop to offer samples - PTS : " +
+                                  sample.info.presentationTimeUs + ", Duration : " +
+                                  sample.duration + ", isEOS : " + sample.isEOS());
+                }
+                mDemuxedInputSamples.offer(sample);
+            }
+        }
+    }
+
+    // Return the presentation time of the first keyframe sample in the queue.
+    // If there's no keyframe in the queue, return Long.MAX_VALUE so
+    // MFR won't mistake this for the decode getting slow.
+    public long getNextKeyFrameTime() {
+        for (GeckoHlsSample sample : mDemuxedInputSamples) {
+            boolean isKeyFrame = sample != null &&
+                (sample.info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
+            if (isKeyFrame) {
+                return sample.info.presentationTimeUs;
+            }
+        }
+        return Long.MAX_VALUE;
+    }
+
+    @Override
+    protected void onStreamChanged(Format[] formats) {
+        // Remember all formats of the new stream; presumably fed to
+        // getCodecMaxValues() when (re)configuring — confirm at the caller.
+        mStreamFormats = formats;
+    }
+
+    // Compute the largest width/height/input-buffer-size over |format| and
+    // every stream format it could adapt to, so one codec configuration
+    // covers all compatible variants.
+    private static CodecMaxValues getCodecMaxValues(Format format, Format[] streamFormats) {
+        int width = format.width;
+        int height = format.height;
+        int inputSize = getMaxInputSize(format);
+        for (Format candidate : streamFormats) {
+            if (areAdaptationCompatible(format, candidate)) {
+                width = Math.max(width, candidate.width);
+                height = Math.max(height, candidate.height);
+                inputSize = Math.max(inputSize, getMaxInputSize(candidate));
+            }
+        }
+        return new CodecMaxValues(width, height, inputSize);
+    }
+
+    // Return an upper bound on the size (in bytes) of an input sample for
+    // |format|, or Format.NO_VALUE if none can be determined.
+    private static int getMaxInputSize(Format format) {
+        if (format.maxInputSize != Format.NO_VALUE) {
+            // The format defines an explicit maximum input size.
+            return format.maxInputSize;
+        }
+
+        if (format.width == Format.NO_VALUE || format.height == Format.NO_VALUE) {
+            // We can't infer a maximum input size without video dimensions.
+            return Format.NO_VALUE;
+        }
+
+        if (format.sampleMimeType == null) {
+            // A String switch throws NPE on a null selector; treat an unknown
+            // mime type like an unsupported one.
+            return Format.NO_VALUE;
+        }
+
+        // Attempt to infer a maximum input size from the format.
+        int maxPixels;
+        int minCompressionRatio;
+        switch (format.sampleMimeType) {
+            case MimeTypes.VIDEO_H264:
+                // Round up width/height to an integer number of 16x16 macroblocks.
+                maxPixels = ((format.width + 15) / 16) * ((format.height + 15) / 16) * 16 * 16;
+                minCompressionRatio = 2;
+                break;
+            default:
+                // Leave the default max input size.
+                return Format.NO_VALUE;
+        }
+        // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
+        return (maxPixels * 3) / (2 * minCompressionRatio);
+    }
+
+    // Two formats may share one codec instance when they carry the same mime
+    // type and the same rotation. Format.sampleMimeType is nullable; compare
+    // null-safely (null == null counts as equal) instead of risking an NPE.
+    private static boolean areAdaptationCompatible(Format first, Format second) {
+        String firstMime = first.sampleMimeType;
+        String secondMime = second.sampleMimeType;
+        boolean sameMime = (firstMime == null) ? (secondMime == null)
+                                               : firstMime.equals(secondMime);
+        return sameMime &&
+               getRotationDegrees(first) == getRotationDegrees(second);
+    }
+
+    // Treat an unset rotation (Format.NO_VALUE) as 0 degrees.
+    private static int getRotationDegrees(Format format) {
+        if (format.rotationDegrees == Format.NO_VALUE) {
+            return 0;
+        }
+        return format.rotationDegrees;
+    }
+
+    // Immutable holder for the maximum width/height/input-buffer-size the
+    // codec must be prepared to handle across all adaptation formats.
+    private static final class CodecMaxValues {
+        public final int width;
+        public final int height;
+        public final int inputSize;
+        public CodecMaxValues(int width, int height, int inputSize) {
+            this.width = width;
+            this.height = height;
+            this.inputSize = inputSize;
+        }
+    }
+}
new file mode 100644
--- /dev/null
+++ b/mobile/android/geckoview/src/main/java/org/mozilla/gecko/media/Utils.java
@@ -0,0 +1,41 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+package org.mozilla.gecko.media;
+
+import android.util.Log;
+
+/**
+ * Static helpers shared by the HLS media classes: thread identification for
+ * debug logging and a byte[]-to-hex-string formatter.
+ */
+public class Utils {
+    // Utility class: all members are static, so forbid instantiation.
+    private Utils() {}
+
+    /** Returns the id of the calling thread. */
+    public static long getThreadId() {
+        return Thread.currentThread().getId();
+    }
+
+    /** Returns "name:(id)N:(priority)P:(group)G" for the calling thread. */
+    public static String getThreadSignature() {
+        Thread t = Thread.currentThread();
+        long id = t.getId();
+        String name = t.getName();
+        long priority = t.getPriority();
+        // getThreadGroup() only returns null for a dead thread; the current
+        // thread is always alive, so this dereference is safe.
+        String groupName = t.getThreadGroup().getName();
+        return (name
+                + ":(id)" + id
+                + ":(priority)" + priority
+                + ":(group)" + groupName);
+    }
+
+    /** Logs the calling thread's signature at debug level. */
+    public static void logThreadSignature() {
+        Log.d("ThreadUtils", getThreadSignature());
+    }
+
+    // Upper-case hex digits; UPPER_SNAKE_CASE per constant-naming convention.
+    private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
+
+    /** Converts |bytes| to an upper-case hex string, two chars per byte. */
+    public static String bytesToHex(byte[] bytes) {
+        char[] hexChars = new char[bytes.length * 2];
+        for (int i = 0; i < bytes.length; i++) {
+            int v = bytes[i] & 0xFF;
+            hexChars[i * 2] = HEX_ARRAY[v >>> 4];
+            hexChars[i * 2 + 1] = HEX_ARRAY[v & 0x0F];
+        }
+        return new String(hexChars);
+    }
+}
\ No newline at end of file