Commit e770e5c2 by Oliver Woodman

Multi-track - The (nearly) final step.

- Migrate demo app to use new APIs.
- Add multi-track support for ExtractorSampleSource case.
- Add multi-track support for SmoothStreaming use case.

The final step is to add support back for the DASH use case and
delete MultiTrackChunkSource. This is blocked on multi-period support
landing, in order to prevent a horrendous merge conflict. We also
need to update HLS to expose sensible track information.

Issue: #514
parent 57250036
...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo; ...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.AspectRatioFrameLayout; import com.google.android.exoplayer.AspectRatioFrameLayout;
import com.google.android.exoplayer.ExoPlayer; import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.audio.AudioCapabilities; import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.audio.AudioCapabilitiesReceiver; import com.google.android.exoplayer.audio.AudioCapabilitiesReceiver;
import com.google.android.exoplayer.demo.player.DashRendererBuilder; import com.google.android.exoplayer.demo.player.DashRendererBuilder;
...@@ -33,6 +34,7 @@ import com.google.android.exoplayer.text.CaptionStyleCompat; ...@@ -33,6 +34,7 @@ import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.Cue; import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout; import com.google.android.exoplayer.text.SubtitleLayout;
import com.google.android.exoplayer.util.DebugTextViewHelper; import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util; import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.VerboseLogUtil; import com.google.android.exoplayer.util.VerboseLogUtil;
...@@ -435,23 +437,34 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback, ...@@ -435,23 +437,34 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
}); });
Menu menu = popup.getMenu(); Menu menu = popup.getMenu();
// ID_OFFSET ensures we avoid clashing with Menu.NONE (which equals 0) // ID_OFFSET ensures we avoid clashing with Menu.NONE (which equals 0)
menu.add(MENU_GROUP_TRACKS, DemoPlayer.DISABLED_TRACK + ID_OFFSET, Menu.NONE, R.string.off); menu.add(MENU_GROUP_TRACKS, DemoPlayer.TRACK_DISABLED + ID_OFFSET, Menu.NONE, R.string.off);
if (trackCount == 1 && TextUtils.isEmpty(player.getTrackName(trackType, 0))) { for (int i = 0; i < trackCount; i++) {
menu.add(MENU_GROUP_TRACKS, DemoPlayer.PRIMARY_TRACK + ID_OFFSET, Menu.NONE, R.string.on); menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE,
} else { buildTrackName(player.getTrackFormat(trackType, i)));
for (int i = 0; i < trackCount; i++) {
menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE, player.getTrackName(trackType, i));
}
} }
menu.setGroupCheckable(MENU_GROUP_TRACKS, true, true); menu.setGroupCheckable(MENU_GROUP_TRACKS, true, true);
menu.findItem(player.getSelectedTrackIndex(trackType) + ID_OFFSET).setChecked(true); menu.findItem(player.getSelectedTrack(trackType) + ID_OFFSET).setChecked(true);
}
private static String buildTrackName(MediaFormat format) {
if (format.adaptive) {
return "auto";
} else if (MimeTypes.isVideo(format.mimeType)) {
return format.width + "x" + format.height;
} else if (MimeTypes.isAudio(format.mimeType)) {
return format.channelCount + "ch, " + format.sampleRate + "Hz";
} else if (MimeTypes.isText(format.mimeType) && !TextUtils.isEmpty(format.language)) {
return format.language;
} else {
return "unknown";
}
} }
private boolean onTrackItemClick(MenuItem item, int type) { private boolean onTrackItemClick(MenuItem item, int type) {
if (player == null || item.getGroupId() != MENU_GROUP_TRACKS) { if (player == null || item.getGroupId() != MENU_GROUP_TRACKS) {
return false; return false;
} }
player.selectTrack(type, item.getItemId() - ID_OFFSET); player.setSelectedTrack(type, item.getItemId() - ID_OFFSET);
return true; return true;
} }
......
...@@ -352,7 +352,7 @@ public class DashRendererBuilder implements RendererBuilder { ...@@ -352,7 +352,7 @@ public class DashRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer; renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter); player.onRenderers(renderers, bandwidthMeter);
} }
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) { private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
......
...@@ -23,12 +23,12 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer; ...@@ -23,12 +23,12 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer; import com.google.android.exoplayer.MediaCodecTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException; import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer; import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange; import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer; import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioTrack; import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.ChunkSampleSource; import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.Format; import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.dash.DashChunkSource; import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager; import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.hls.HlsSampleSource; import com.google.android.exoplayer.hls.HlsSampleSource;
...@@ -46,7 +46,6 @@ import android.os.Looper; ...@@ -46,7 +46,6 @@ import android.os.Looper;
import android.view.Surface; import android.view.Surface;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CopyOnWriteArrayList;
...@@ -148,9 +147,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -148,9 +147,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
public static final int STATE_BUFFERING = ExoPlayer.STATE_BUFFERING; public static final int STATE_BUFFERING = ExoPlayer.STATE_BUFFERING;
public static final int STATE_READY = ExoPlayer.STATE_READY; public static final int STATE_READY = ExoPlayer.STATE_READY;
public static final int STATE_ENDED = ExoPlayer.STATE_ENDED; public static final int STATE_ENDED = ExoPlayer.STATE_ENDED;
public static final int TRACK_DISABLED = ExoPlayer.TRACK_DISABLED;
public static final int DISABLED_TRACK = -1; public static final int TRACK_DEFAULT = ExoPlayer.TRACK_DEFAULT;
public static final int PRIMARY_TRACK = 0;
public static final int RENDERER_COUNT = 4; public static final int RENDERER_COUNT = 4;
public static final int TYPE_VIDEO = 0; public static final int TYPE_VIDEO = 0;
...@@ -179,9 +177,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -179,9 +177,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
private int videoTrackToRestore; private int videoTrackToRestore;
private BandwidthMeter bandwidthMeter; private BandwidthMeter bandwidthMeter;
private MultiTrackChunkSource[] multiTrackSources;
private String[][] trackNames;
private int[] selectedTracks;
private boolean backgrounded; private boolean backgrounded;
private CaptionListener captionListener; private CaptionListener captionListener;
...@@ -198,9 +193,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -198,9 +193,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
listeners = new CopyOnWriteArrayList<>(); listeners = new CopyOnWriteArrayList<>();
lastReportedPlaybackState = STATE_IDLE; lastReportedPlaybackState = STATE_IDLE;
rendererBuildingState = RENDERER_BUILDING_STATE_IDLE; rendererBuildingState = RENDERER_BUILDING_STATE_IDLE;
selectedTracks = new int[RENDERER_COUNT];
// Disable text initially. // Disable text initially.
selectedTracks[TYPE_TEXT] = DISABLED_TRACK; player.setSelectedTrack(TYPE_TEXT, TRACK_DISABLED);
} }
public PlayerControl getPlayerControl() { public PlayerControl getPlayerControl() {
...@@ -245,28 +239,20 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -245,28 +239,20 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
pushSurface(true); pushSurface(true);
} }
@SuppressWarnings("deprecation")
public int getTrackCount(int type) { public int getTrackCount(int type) {
return !player.getRendererHasMedia(type) ? 0 : trackNames[type].length; return player.getTrackCount(type);
} }
public String getTrackName(int type, int index) { public MediaFormat getTrackFormat(int type, int index) {
return trackNames[type][index]; return player.getTrackFormat(type, index);
} }
public int getSelectedTrackIndex(int type) { public int getSelectedTrack(int type) {
return selectedTracks[type]; return player.getSelectedTrack(type);
} }
public void selectTrack(int type, int index) { public void setSelectedTrack(int type, int index) {
if (selectedTracks[type] == index) { player.setSelectedTrack(type, index);
return;
}
selectedTracks[type] = index;
pushTrackSelection(type, true);
if (type == TYPE_TEXT && index == DISABLED_TRACK && captionListener != null) {
captionListener.onCues(Collections.<Cue>emptyList());
}
} }
public boolean getBackgrounded() { public boolean getBackgrounded() {
...@@ -279,11 +265,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -279,11 +265,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
} }
this.backgrounded = backgrounded; this.backgrounded = backgrounded;
if (backgrounded) { if (backgrounded) {
videoTrackToRestore = getSelectedTrackIndex(TYPE_VIDEO); videoTrackToRestore = getSelectedTrack(TYPE_VIDEO);
selectTrack(TYPE_VIDEO, DISABLED_TRACK); setSelectedTrack(TYPE_VIDEO, TRACK_DISABLED);
blockingClearSurface(); blockingClearSurface();
} else { } else {
selectTrack(TYPE_VIDEO, videoTrackToRestore); setSelectedTrack(TYPE_VIDEO, videoTrackToRestore);
} }
} }
...@@ -294,7 +280,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -294,7 +280,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
rendererBuilder.cancel(); rendererBuilder.cancel();
videoFormat = null; videoFormat = null;
videoRenderer = null; videoRenderer = null;
multiTrackSources = null;
rendererBuildingState = RENDERER_BUILDING_STATE_BUILDING; rendererBuildingState = RENDERER_BUILDING_STATE_BUILDING;
maybeReportPlayerState(); maybeReportPlayerState();
rendererBuilder.buildRenderers(this); rendererBuilder.buildRenderers(this);
...@@ -303,51 +288,25 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -303,51 +288,25 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
/** /**
* Invoked with the results from a {@link RendererBuilder}. * Invoked with the results from a {@link RendererBuilder}.
* *
* @param trackNames The names of the available tracks, indexed by {@link DemoPlayer} TYPE_*
* constants. May be null if the track names are unknown. An individual element may be null
* if the track names are unknown for the corresponding type.
* @param multiTrackSources Sources capable of switching between multiple available tracks,
* indexed by {@link DemoPlayer} TYPE_* constants. May be null if there are no types with
* multiple tracks. An individual element may be null if it does not have multiple tracks.
* @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual * @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual
* element may be null if there do not exist tracks of the corresponding type. * element may be null if there do not exist tracks of the corresponding type.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null.
*/ */
/* package */ void onRenderers(String[][] trackNames, /* package */ void onRenderers(TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
MultiTrackChunkSource[] multiTrackSources, TrackRenderer[] renderers, for (int i = 0; i < RENDERER_COUNT; i++) {
BandwidthMeter bandwidthMeter) { if (renderers[i] == null) {
// Normalize the results.
if (trackNames == null) {
trackNames = new String[RENDERER_COUNT][];
}
if (multiTrackSources == null) {
multiTrackSources = new MultiTrackChunkSource[RENDERER_COUNT];
}
for (int rendererIndex = 0; rendererIndex < RENDERER_COUNT; rendererIndex++) {
if (renderers[rendererIndex] == null) {
// Convert a null renderer to a dummy renderer. // Convert a null renderer to a dummy renderer.
renderers[rendererIndex] = new DummyTrackRenderer(); renderers[i] = new DummyTrackRenderer();
}
if (trackNames[rendererIndex] == null) {
// Convert a null trackNames to an array of suitable length.
int trackCount = multiTrackSources[rendererIndex] != null
? multiTrackSources[rendererIndex].getMultiTrackCount() : 1;
trackNames[rendererIndex] = new String[trackCount];
} }
} }
// Complete preparation. // Complete preparation.
this.trackNames = trackNames;
this.videoRenderer = renderers[TYPE_VIDEO]; this.videoRenderer = renderers[TYPE_VIDEO];
this.codecCounters = videoRenderer instanceof MediaCodecTrackRenderer this.codecCounters = videoRenderer instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) videoRenderer).codecCounters ? ((MediaCodecTrackRenderer) videoRenderer).codecCounters
: renderers[TYPE_AUDIO] instanceof MediaCodecTrackRenderer : renderers[TYPE_AUDIO] instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) renderers[TYPE_AUDIO]).codecCounters : null; ? ((MediaCodecTrackRenderer) renderers[TYPE_AUDIO]).codecCounters : null;
this.multiTrackSources = multiTrackSources;
this.bandwidthMeter = bandwidthMeter; this.bandwidthMeter = bandwidthMeter;
pushSurface(false); pushSurface(false);
pushTrackSelection(TYPE_VIDEO, true);
pushTrackSelection(TYPE_AUDIO, true);
pushTrackSelection(TYPE_TEXT, true);
player.prepare(renderers); player.prepare(renderers);
rendererBuildingState = RENDERER_BUILDING_STATE_BUILT; rendererBuildingState = RENDERER_BUILDING_STATE_BUILT;
} }
...@@ -537,14 +496,14 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -537,14 +496,14 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
@Override @Override
public void onCues(List<Cue> cues) { public void onCues(List<Cue> cues) {
if (captionListener != null && selectedTracks[TYPE_TEXT] != DISABLED_TRACK) { if (captionListener != null && getSelectedTrack(TYPE_TEXT) != TRACK_DISABLED) {
captionListener.onCues(cues); captionListener.onCues(cues);
} }
} }
@Override @Override
public void onMetadata(Map<String, Object> metadata) { public void onMetadata(Map<String, Object> metadata) {
if (id3MetadataListener != null && selectedTracks[TYPE_METADATA] != DISABLED_TRACK) { if (id3MetadataListener != null && getSelectedTrack(TYPE_METADATA) != TRACK_DISABLED) {
id3MetadataListener.onId3Metadata(metadata); id3MetadataListener.onId3Metadata(metadata);
} }
} }
...@@ -620,26 +579,4 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -620,26 +579,4 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
} }
} }
@SuppressWarnings("deprecation")
private void pushTrackSelection(int type, boolean allowRendererEnable) {
if (multiTrackSources == null) {
return;
}
int trackIndex = selectedTracks[type];
if (trackIndex == DISABLED_TRACK) {
player.setRendererEnabled(type, false);
} else if (multiTrackSources[type] == null) {
player.setRendererEnabled(type, allowRendererEnable);
} else {
boolean playWhenReady = player.getPlayWhenReady();
player.setPlayWhenReady(false);
player.setRendererEnabled(type, false);
player.sendMessage(multiTrackSources[type], MultiTrackChunkSource.MSG_SELECT_TRACK,
trackIndex);
player.setRendererEnabled(type, allowRendererEnable);
player.setPlayWhenReady(playWhenReady);
}
}
} }
...@@ -74,7 +74,7 @@ public class ExtractorRendererBuilder implements RendererBuilder { ...@@ -74,7 +74,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer; renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(null, null, renderers, bandwidthMeter); player.onRenderers(renderers, bandwidthMeter);
} }
@Override @Override
......
...@@ -162,7 +162,7 @@ public class HlsRendererBuilder implements RendererBuilder { ...@@ -162,7 +162,7 @@ public class HlsRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_METADATA] = id3Renderer; renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer; renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
player.onRenderers(null, null, renderers, bandwidthMeter); player.onRenderers(renderers, bandwidthMeter);
} }
} }
......
...@@ -18,15 +18,12 @@ package com.google.android.exoplayer.demo.player; ...@@ -18,15 +18,12 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl; import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl; import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer; import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer; import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer; import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities; import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.ChunkSampleSource; import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil; import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder; import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.drm.DrmSessionManager; import com.google.android.exoplayer.drm.DrmSessionManager;
...@@ -37,8 +34,8 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource; ...@@ -37,8 +34,8 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingTrackSelector;
import com.google.android.exoplayer.text.TextTrackRenderer; import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultAllocator; import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter; import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
...@@ -160,126 +157,78 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder { ...@@ -160,126 +157,78 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
} }
} }
// Obtain stream elements for playback.
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioStreamElementCount++;
} else if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textStreamElementCount++;
} else if (videoStreamElementIndex == -1
&& manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
videoStreamElementIndex = i;
}
}
// Determine which video tracks we should use for playback.
int[] videoTrackIndices = null;
if (videoStreamElementIndex != -1) {
try {
videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
Arrays.asList(manifest.streamElements[videoStreamElementIndex].tracks), null, false);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
// Build the video renderer. // Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer; DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
if (videoTrackIndices == null || videoTrackIndices.length == 0) { ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoRenderer = null; new TrackSelector(context, StreamElement.TYPE_VIDEO), videoDataSource,
} else { new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
videoStreamElementIndex, videoTrackIndices, videoDataSource, DemoPlayer.TYPE_VIDEO);
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS); TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, mainHandler, player, 50);
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
}
// Build the audio renderer. // Build the audio renderer.
final String[] audioTrackNames; DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
final MultiTrackChunkSource audioChunkSource; ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
final MediaCodecAudioTrackRenderer audioRenderer; new TrackSelector(context, StreamElement.TYPE_AUDIO), audioDataSource, null,
if (audioStreamElementCount == 0) { LIVE_EDGE_LATENCY_MS);
audioTrackNames = null; ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
audioChunkSource = null; AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
audioRenderer = null; DemoPlayer.TYPE_AUDIO);
} else { TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
audioTrackNames = new String[audioStreamElementCount]; drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
audioStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
audioStreamElementCount++;
}
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player, AudioCapabilities.getCapabilities(context));
}
// Build the text renderer. // Build the text renderer.
final String[] textTrackNames; DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
final MultiTrackChunkSource textChunkSource; ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
final TrackRenderer textRenderer; new TrackSelector(context, StreamElement.TYPE_TEXT), textDataSource, null,
if (textStreamElementCount == 0) { LIVE_EDGE_LATENCY_MS);
textTrackNames = null; ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
textChunkSource = null; TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
textRenderer = null; DemoPlayer.TYPE_TEXT);
} else { TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
textTrackNames = new String[textStreamElementCount]; mainHandler.getLooper());
ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
DataSource ttmlDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
textStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
textStreamElementCount++;
}
}
textChunkSource = new MultiTrackChunkSource(textChunkSources);
ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(ttmlSampleSource, player, mainHandler.getLooper(),
new TtmlParser());
}
// Invoke the callback. // Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT]; TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer; renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter); player.onRenderers(renderers, bandwidthMeter);
}
}
/**
 * A {@link SmoothStreamingTrackSelector} that selects every stream element of a
 * single element type. For {@link StreamElement#TYPE_VIDEO} elements, the tracks
 * playable on the default display are exposed both as one adaptive track and as
 * individual fixed tracks; for every other element type each track is exposed as
 * a fixed track.
 */
private static final class TrackSelector implements SmoothStreamingTrackSelector {

  private final Context context;
  private final int elementType;

  private TrackSelector(Context context, int type) {
    this.context = context;
    this.elementType = type;
  }

  @Override
  public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
    for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
      if (manifest.streamElements[elementIndex].type != elementType) {
        // Skip elements of other types; this selector handles a single type.
        continue;
      }
      if (elementType == StreamElement.TYPE_VIDEO) {
        // Filter to the formats supported on the default display, then expose
        // them as one adaptive track plus a fixed track per format.
        int[] trackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
            context, Arrays.asList(manifest.streamElements[elementIndex].tracks), null, false);
        output.adaptiveTrack(manifest, elementIndex, trackIndices);
        for (int trackIndex : trackIndices) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
      } else {
        // Non-video: expose every track in the element as a fixed track.
        int trackCount = manifest.streamElements[elementIndex].tracks.length;
        for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
      }
    }
  }

}
} }
......
...@@ -28,6 +28,7 @@ import android.text.TextUtils; ...@@ -28,6 +28,7 @@ import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import android.util.Pair; import android.util.Pair;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
/** /**
...@@ -42,7 +43,7 @@ public final class MediaCodecUtil { ...@@ -42,7 +43,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the * Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart). * mediaserver process has crashed and is yet to restart).
*/ */
public static class DecoderQueryException extends Exception { public static class DecoderQueryException extends IOException {
private DecoderQueryException(Throwable cause) { private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause); super("Failed to query underlying media codecs", cause);
......
...@@ -187,15 +187,6 @@ public final class MediaFormat { ...@@ -187,15 +187,6 @@ public final class MediaFormat {
NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, false, NO_VALUE, NO_VALUE); NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, false, NO_VALUE, NO_VALUE);
} }
public static MediaFormat createAdaptiveFormat(String mimeType) {
return createAdaptiveFormat(mimeType, C.UNKNOWN_TIME_US);
}
public static MediaFormat createAdaptiveFormat(String mimeType, long durationUs) {
return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, true, NO_VALUE, NO_VALUE);
}
/* package */ MediaFormat(String mimeType, int maxInputSize, long durationUs, int width, /* package */ MediaFormat(String mimeType, int maxInputSize, long durationUs, int width,
int height, int rotationDegrees, float pixelWidthHeightRatio, int channelCount, int height, int rotationDegrees, float pixelWidthHeightRatio, int channelCount,
int sampleRate, String language, long subsampleOffsetUs, List<byte[]> initializationData, int sampleRate, String language, long subsampleOffsetUs, List<byte[]> initializationData,
...@@ -236,6 +227,12 @@ public final class MediaFormat { ...@@ -236,6 +227,12 @@ public final class MediaFormat {
initializationData, adaptive, maxWidth, maxHeight); initializationData, adaptive, maxWidth, maxHeight);
} }
public MediaFormat copyWithAdaptive(boolean adaptive) {
return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
pixelWidthHeightRatio, channelCount, sampleRate, language, subsampleOffsetUs,
initializationData, adaptive, maxWidth, maxHeight);
}
/** /**
* @return A {@link MediaFormat} representation of this format. * @return A {@link MediaFormat} representation of this format.
*/ */
......
...@@ -134,7 +134,9 @@ public class ChunkSampleSource implements SampleSource, SampleSourceReader, Load ...@@ -134,7 +134,9 @@ public class ChunkSampleSource implements SampleSource, SampleSourceReader, Load
} else if (!chunkSource.prepare()) { } else if (!chunkSource.prepare()) {
return false; return false;
} }
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType); if (chunkSource.getTrackCount() > 0) {
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType);
}
state = STATE_PREPARED; state = STATE_PREPARED;
return true; return true;
} }
......
...@@ -67,11 +67,21 @@ public interface ChunkSource { ...@@ -67,11 +67,21 @@ public interface ChunkSource {
MediaFormat getFormat(int track); MediaFormat getFormat(int track);
/** /**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* disabled.
*
* @param track The track index.
*/
void enable(int track);
/**
* Adaptive video {@link ChunkSource} implementations must return a copy of the provided * Adaptive video {@link ChunkSource} implementations must return a copy of the provided
* {@link MediaFormat} with the maximum video dimensions set. Other implementations can return * {@link MediaFormat} with the maximum video dimensions set. Other implementations can return
* the provided {@link MediaFormat} directly. * the provided {@link MediaFormat} directly.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called when the source is enabled.
* *
* @param format The format to be copied or returned. * @param format The format to be copied or returned.
* @return A copy of the provided {@link MediaFormat} with the maximum video dimensions set, or * @return A copy of the provided {@link MediaFormat} with the maximum video dimensions set, or
...@@ -80,16 +90,6 @@ public interface ChunkSource { ...@@ -80,16 +90,6 @@ public interface ChunkSource {
MediaFormat getWithMaxVideoDimensions(MediaFormat format); MediaFormat getWithMaxVideoDimensions(MediaFormat format);
/** /**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* disabled.
*
* @param track The track index.
*/
void enable(int track);
/**
* Indicates to the source that it should still be checking for updates to the stream. * Indicates to the source that it should still be checking for updates to the stream.
* <p> * <p>
* This method should only be called when the source is enabled. * This method should only be called when the source is enabled.
......
...@@ -36,6 +36,7 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.Stre ...@@ -36,6 +36,7 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.Stre
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec; import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.CodecSpecificDataUtil; import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.ManifestFetcher; import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes; import com.google.android.exoplayer.util.MimeTypes;
...@@ -46,6 +47,7 @@ import android.util.Base64; ...@@ -46,6 +47,7 @@ import android.util.Base64;
import android.util.SparseArray; import android.util.SparseArray;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
...@@ -53,31 +55,33 @@ import java.util.List; ...@@ -53,31 +55,33 @@ import java.util.List;
/** /**
* An {@link ChunkSource} for SmoothStreaming. * An {@link ChunkSource} for SmoothStreaming.
*/ */
public class SmoothStreamingChunkSource implements ChunkSource { public class SmoothStreamingChunkSource implements ChunkSource,
SmoothStreamingTrackSelector.Output {
private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000; private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000;
private static final int INITIALIZATION_VECTOR_SIZE = 8; private static final int INITIALIZATION_VECTOR_SIZE = 8;
private final MediaFormat mediaFormat; private final SmoothStreamingTrackSelector trackSelector;
private final DataSource dataSource; private final DataSource dataSource;
private final FormatEvaluator formatEvaluator;
private final Evaluation evaluation; private final Evaluation evaluation;
private final long liveEdgeLatencyUs; private final long liveEdgeLatencyUs;
private final int maxWidth; private final TrackEncryptionBox[] trackEncryptionBoxes;
private final int maxHeight; private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final DrmInitData.Mapped drmInitData;
private final FormatEvaluator adaptiveFormatEvaluator;
// The tracks exposed by this source.
private final ArrayList<ExposedTrack> tracks;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers; private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats; private final SparseArray<MediaFormat> mediaFormats;
private final DrmInitData drmInitData;
private final Format[] formats;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final int streamElementIndex;
private boolean prepareCalled;
private SmoothStreamingManifest currentManifest; private SmoothStreamingManifest currentManifest;
private int currentManifestChunkOffset; private int currentManifestChunkOffset;
private boolean finishedCurrentManifest; private boolean currentManifestFinished;
private ExposedTrack enabledTrack;
private IOException fatalError; private IOException fatalError;
/** /**
...@@ -88,10 +92,7 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -88,10 +92,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param streamElementIndex The index of the stream element in the manifest to be provided by * @param trackSelector Selects tracks from the manifest to be exposed by this source.
* the source.
* @param trackIndices The indices of the tracks within the stream element to be considered by
* the source. May be null if all tracks within the element should be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param formatEvaluator Selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
...@@ -101,122 +102,107 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -101,122 +102,107 @@ public class SmoothStreamingChunkSource implements ChunkSource {
* Hence a small value may increase the probability of rebuffering and playback failures. * Hence a small value may increase the probability of rebuffering and playback failures.
*/ */
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
int streamElementIndex, int[] trackIndices, DataSource dataSource, SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) { FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), streamElementIndex, trackIndices, this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource, formatEvaluator,
dataSource, formatEvaluator, liveEdgeLatencyMs); liveEdgeLatencyMs);
} }
/** /**
* Constructor to use for fixed duration content. * Constructor to use for fixed duration content.
* *
* @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}. * @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
* @param streamElementIndex The index of the stream element in the manifest to be provided by * @param trackSelector Selects tracks from the manifest to be exposed by this source.
* the source.
* @param trackIndices The indices of the tracks within the stream element to be considered by
* the source. May be null if all tracks within the element should be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param formatEvaluator Selects from the available formats.
*/ */
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementIndex, public SmoothStreamingChunkSource(SmoothStreamingManifest manifest,
int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator) { SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
this(null, manifest, streamElementIndex, trackIndices, dataSource, formatEvaluator, 0); FormatEvaluator formatEvaluator) {
this(null, manifest, trackSelector, dataSource, formatEvaluator, 0);
} }
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices, SmoothStreamingManifest initialManifest, SmoothStreamingTrackSelector trackSelector,
DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) { DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
this.manifestFetcher = manifestFetcher; this.manifestFetcher = manifestFetcher;
this.streamElementIndex = streamElementIndex;
this.currentManifest = initialManifest; this.currentManifest = initialManifest;
this.trackSelector = trackSelector;
this.dataSource = dataSource; this.dataSource = dataSource;
this.formatEvaluator = formatEvaluator; this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
StreamElement streamElement = getElement(initialManifest);
// TODO: Remove this and pass proper formats instead (b/22996976).
mediaFormat = MediaFormat.createFormatForMimeType(streamElement.tracks[0].format.mimeType,
initialManifest.durationUs);
evaluation = new Evaluation(); evaluation = new Evaluation();
tracks = new ArrayList<>();
extractorWrappers = new SparseArray<>();
mediaFormats = new SparseArray<>();
TrackEncryptionBox[] trackEncryptionBoxes = null;
ProtectionElement protectionElement = initialManifest.protectionElement; ProtectionElement protectionElement = initialManifest.protectionElement;
if (protectionElement != null) { if (protectionElement != null) {
byte[] keyId = getKeyId(protectionElement.data); byte[] keyId = getProtectionElementKeyId(protectionElement.data);
trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes = new TrackEncryptionBox[1];
trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId);
DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4); drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
drmInitData.put(protectionElement.uuid, protectionElement.data); drmInitData.put(protectionElement.uuid, protectionElement.data);
this.drmInitData = drmInitData;
} else { } else {
trackEncryptionBoxes = null;
drmInitData = null; drmInitData = null;
} }
int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length;
formats = new Format[trackCount];
extractorWrappers = new SparseArray<>();
mediaFormats = new SparseArray<>();
int maxWidth = 0;
int maxHeight = 0;
for (int i = 0; i < trackCount; i++) {
int trackIndex = trackIndices != null ? trackIndices[i] : i;
formats[i] = streamElement.tracks[trackIndex].format;
maxWidth = Math.max(maxWidth, formats[i].width);
maxHeight = Math.max(maxHeight, formats[i].height);
MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex);
int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_vide
: streamElement.type == StreamElement.TYPE_AUDIO ? Track.TYPE_soun
: Track.TYPE_text;
FragmentedMp4Extractor extractor = new FragmentedMp4Extractor(
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale,
initialManifest.durationUs, mediaFormat, trackEncryptionBoxes,
trackType == Track.TYPE_vide ? 4 : -1));
extractorWrappers.put(trackIndex, new ChunkExtractorWrapper(extractor));
mediaFormats.put(trackIndex, mediaFormat);
}
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
Arrays.sort(formats, new DecreasingBandwidthComparator());
} }
// ChunkSource implementation.
@Override @Override
public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) { public void maybeThrowError() throws IOException {
return MimeTypes.isVideo(mediaFormat.mimeType) if (fatalError != null) {
? format.copyWithMaxVideoDimensions(maxWidth, maxHeight) : format; throw fatalError;
} else {
manifestFetcher.maybeThrowError();
}
} }
@Override @Override
public boolean prepare() { public boolean prepare() {
return true; if (!prepareCalled) {
prepareCalled = true;
try {
trackSelector.selectTracks(currentManifest, this);
} catch (IOException e) {
fatalError = e;
}
}
return fatalError == null;
} }
@Override @Override
public int getTrackCount() { public int getTrackCount() {
return 1; return tracks.size();
} }
@Override @Override
public final MediaFormat getFormat(int track) { public final MediaFormat getFormat(int track) {
return mediaFormat; return tracks.get(track).format;
} }
@Override @Override
public void enable(int track) { public void enable(int track) {
fatalError = null; fatalError = null;
formatEvaluator.enable(); evaluation.format = null;
enabledTrack = tracks.get(track);
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.enable();
}
if (manifestFetcher != null) { if (manifestFetcher != null) {
manifestFetcher.enable(); manifestFetcher.enable();
} }
} }
@Override @Override
public void disable(List<? extends MediaChunk> queue) { public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) {
formatEvaluator.disable(); if (enabledTrack.isAdaptive() && MimeTypes.isVideo(format.mimeType)) {
if (manifestFetcher != null) { return format.copyWithMaxVideoDimensions(
manifestFetcher.disable(); enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight);
} }
return format;
} }
@Override @Override
...@@ -227,9 +213,9 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -227,9 +213,9 @@ public class SmoothStreamingChunkSource implements ChunkSource {
SmoothStreamingManifest newManifest = manifestFetcher.getManifest(); SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) { if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = getElement(currentManifest); StreamElement currentElement = currentManifest.streamElements[enabledTrack.elementIndex];
int currentElementChunkCount = currentElement.chunkCount; int currentElementChunkCount = currentElement.chunkCount;
StreamElement newElement = getElement(newManifest); StreamElement newElement = newManifest.streamElements[enabledTrack.elementIndex];
if (currentElementChunkCount == 0 || newElement.chunkCount == 0) { if (currentElementChunkCount == 0 || newElement.chunkCount == 0) {
// There's no overlap between the old and new elements because at least one is empty. // There's no overlap between the old and new elements because at least one is empty.
currentManifestChunkOffset += currentElementChunkCount; currentManifestChunkOffset += currentElementChunkCount;
...@@ -246,10 +232,10 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -246,10 +232,10 @@ public class SmoothStreamingChunkSource implements ChunkSource {
} }
} }
currentManifest = newManifest; currentManifest = newManifest;
finishedCurrentManifest = false; currentManifestFinished = false;
} }
if (finishedCurrentManifest && (SystemClock.elapsedRealtime() if (currentManifestFinished && (SystemClock.elapsedRealtime()
> manifestFetcher.getManifestLoadStartTimestamp() + MINIMUM_MANIFEST_REFRESH_PERIOD_MS)) { > manifestFetcher.getManifestLoadStartTimestamp() + MINIMUM_MANIFEST_REFRESH_PERIOD_MS)) {
manifestFetcher.requestRefresh(); manifestFetcher.requestRefresh();
} }
...@@ -264,7 +250,14 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -264,7 +250,14 @@ public class SmoothStreamingChunkSource implements ChunkSource {
} }
evaluation.queueSize = queue.size(); evaluation.queueSize = queue.size();
formatEvaluator.evaluate(queue, playbackPositionUs, formats, evaluation); if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats,
evaluation);
} else {
evaluation.format = enabledTrack.fixedFormat;
evaluation.trigger = Chunk.TRIGGER_MANUAL;
}
Format selectedFormat = evaluation.format; Format selectedFormat = evaluation.format;
out.queueSize = evaluation.queueSize; out.queueSize = evaluation.queueSize;
...@@ -281,17 +274,17 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -281,17 +274,17 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// In all cases where we return before instantiating a new chunk, we want out.chunk to be null. // In all cases where we return before instantiating a new chunk, we want out.chunk to be null.
out.chunk = null; out.chunk = null;
StreamElement streamElement = getElement(currentManifest); StreamElement streamElement = currentManifest.streamElements[enabledTrack.elementIndex];
if (streamElement.chunkCount == 0) { if (streamElement.chunkCount == 0) {
// The manifest is currently empty for this stream. // The manifest is currently empty for this stream.
finishedCurrentManifest = true; currentManifestFinished = true;
return; return;
} }
int chunkIndex; int chunkIndex;
if (queue.isEmpty()) { if (queue.isEmpty()) {
if (currentManifest.isLive) { if (currentManifest.isLive) {
seekPositionUs = getLiveSeekPosition(); seekPositionUs = getLiveSeekPosition(currentManifest, liveEdgeLatencyUs);
} }
chunkIndex = streamElement.getChunkIndex(seekPositionUs); chunkIndex = streamElement.getChunkIndex(seekPositionUs);
} else { } else {
...@@ -306,12 +299,12 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -306,12 +299,12 @@ public class SmoothStreamingChunkSource implements ChunkSource {
return; return;
} else if (chunkIndex >= streamElement.chunkCount) { } else if (chunkIndex >= streamElement.chunkCount) {
// This is beyond the last chunk in the current manifest. // This is beyond the last chunk in the current manifest.
finishedCurrentManifest = true; currentManifestFinished = true;
return; return;
} else if (chunkIndex == streamElement.chunkCount - 1) { } else if (chunkIndex == streamElement.chunkCount - 1) {
// This is the last chunk in the current manifest. Mark the manifest as being finished, // This is the last chunk in the current manifest. Mark the manifest as being finished,
// but continue to return the final chunk. // but continue to return the final chunk.
finishedCurrentManifest = true; currentManifestFinished = true;
} }
} }
...@@ -326,24 +319,18 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -326,24 +319,18 @@ public class SmoothStreamingChunkSource implements ChunkSource {
: chunkStartTimeUs + streamElement.getChunkDurationUs(chunkIndex); : chunkStartTimeUs + streamElement.getChunkDurationUs(chunkIndex);
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset; int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
int trackIndex = getTrackIndex(selectedFormat); int manifestTrackIndex = getManifestTrackIndex(streamElement, selectedFormat);
Uri uri = streamElement.buildRequestUri(trackIndex, chunkIndex); int manifestTrackKey = getManifestTrackKey(enabledTrack.elementIndex, manifestTrackIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null, extractorWrappers.get(trackIndex), Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractorWrappers.get(manifestTrackKey),
drmInitData, dataSource, currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs, drmInitData, dataSource, currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs,
chunkEndTimeUs, evaluation.trigger, mediaFormats.get(trackIndex)); chunkEndTimeUs, evaluation.trigger,
mediaFormats.get(manifestTrackKey));
out.chunk = mediaChunk; out.chunk = mediaChunk;
} }
@Override @Override
public void maybeThrowError() throws IOException {
if (fatalError != null) {
throw fatalError;
} else {
manifestFetcher.maybeThrowError();
}
}
@Override
public void onChunkLoadCompleted(Chunk chunk) { public void onChunkLoadCompleted(Chunk chunk) {
// Do nothing. // Do nothing.
} }
...@@ -353,16 +340,120 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -353,16 +340,120 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// Do nothing. // Do nothing.
} }
@Override
public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.disable();
}
if (manifestFetcher != null) {
manifestFetcher.disable();
}
}
// SmoothStreamingTrackSelector.Output implementation.
@Override
public void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] trackIndices) {
if (adaptiveFormatEvaluator == null) {
// Do nothing.
return;
}
MediaFormat maxHeightMediaFormat = null;
StreamElement streamElement = manifest.streamElements[element];
int maxWidth = -1;
int maxHeight = -1;
Format[] formats = new Format[trackIndices.length];
for (int i = 0; i < formats.length; i++) {
int manifestTrackIndex = trackIndices[i];
formats[i] = streamElement.tracks[manifestTrackIndex].format;
MediaFormat mediaFormat = initManifestTrack(manifest, element, manifestTrackIndex);
if (maxHeightMediaFormat == null || mediaFormat.height > maxHeight) {
maxHeightMediaFormat = mediaFormat;
}
maxWidth = Math.max(maxWidth, mediaFormat.width);
maxHeight = Math.max(maxHeight, mediaFormat.height);
}
Arrays.sort(formats, new DecreasingBandwidthComparator());
MediaFormat adaptiveMediaFormat = maxHeightMediaFormat.copyWithAdaptive(true);
tracks.add(new ExposedTrack(adaptiveMediaFormat, element, formats, maxWidth, maxHeight));
}
@Override
public void fixedTrack(SmoothStreamingManifest manifest, int element, int trackIndex) {
MediaFormat mediaFormat = initManifestTrack(manifest, element, trackIndex);
Format format = manifest.streamElements[element].tracks[trackIndex].format;
tracks.add(new ExposedTrack(mediaFormat, element, format));
}
// Private methods.
private MediaFormat initManifestTrack(SmoothStreamingManifest manifest, int elementIndex,
int trackIndex) {
int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex);
MediaFormat mediaFormat = mediaFormats.get(manifestTrackKey);
if (mediaFormat != null) {
// Already initialized.
return mediaFormat;
}
// Build the media format.
long durationUs = manifest.durationUs;
StreamElement element = manifest.streamElements[elementIndex];
Format format = element.tracks[trackIndex].format;
byte[][] csdArray = element.tracks[trackIndex].csd;
int mp4TrackType;
switch (element.type) {
case StreamElement.TYPE_VIDEO:
mediaFormat = MediaFormat.createVideoFormat(format.mimeType, MediaFormat.NO_VALUE,
durationUs, format.width, format.height, 0, Arrays.asList(csdArray));
mp4TrackType = Track.TYPE_vide;
break;
case StreamElement.TYPE_AUDIO:
List<byte[]> csd;
if (csdArray != null) {
csd = Arrays.asList(csdArray);
} else {
csd = Collections.singletonList(CodecSpecificDataUtil.buildAacAudioSpecificConfig(
format.audioSamplingRate, format.audioChannels));
}
mediaFormat = MediaFormat.createAudioFormat(format.mimeType, MediaFormat.NO_VALUE,
durationUs, format.audioChannels, format.audioSamplingRate, csd);
mp4TrackType = Track.TYPE_soun;
break;
case StreamElement.TYPE_TEXT:
mediaFormat = MediaFormat.createTextFormat(format.mimeType, format.language, durationUs);
mp4TrackType = Track.TYPE_text;
break;
default:
throw new IllegalStateException("Invalid type: " + element.type);
}
// Build the extractor.
FragmentedMp4Extractor mp4Extractor = new FragmentedMp4Extractor(
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
Track mp4Track = new Track(trackIndex, mp4TrackType, element.timescale, durationUs, mediaFormat,
trackEncryptionBoxes, mp4TrackType == Track.TYPE_vide ? 4 : -1);
mp4Extractor.setTrack(mp4Track);
// Store the format and a wrapper around the extractor.
mediaFormats.put(manifestTrackKey, mediaFormat);
extractorWrappers.put(manifestTrackKey, new ChunkExtractorWrapper(mp4Extractor));
return mediaFormat;
}
/** /**
* For live playbacks, determines the seek position that snaps playback to be * For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest * {@code liveEdgeLatencyUs} behind the live edge of the provided manifest.
* *
* @param manifest The manifest.
* @param liveEdgeLatencyUs The live edge latency, in microseconds.
* @return The seek position in microseconds. * @return The seek position in microseconds.
*/ */
private long getLiveSeekPosition() { private static long getLiveSeekPosition(SmoothStreamingManifest manifest,
long liveEdgeLatencyUs) {
long liveEdgeTimestampUs = Long.MIN_VALUE; long liveEdgeTimestampUs = Long.MIN_VALUE;
for (int i = 0; i < currentManifest.streamElements.length; i++) { for (int i = 0; i < manifest.streamElements.length; i++) {
StreamElement streamElement = currentManifest.streamElements[i]; StreamElement streamElement = manifest.streamElements[i];
if (streamElement.chunkCount > 0) { if (streamElement.chunkCount > 0) {
long elementLiveEdgeTimestampUs = long elementLiveEdgeTimestampUs =
streamElement.getStartTimeUs(streamElement.chunkCount - 1) streamElement.getStartTimeUs(streamElement.chunkCount - 1)
...@@ -373,12 +464,8 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -373,12 +464,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
return liveEdgeTimestampUs - liveEdgeLatencyUs; return liveEdgeTimestampUs - liveEdgeLatencyUs;
} }
private StreamElement getElement(SmoothStreamingManifest manifest) { private static int getManifestTrackIndex(StreamElement element, Format format) {
return manifest.streamElements[streamElementIndex]; TrackElement[] tracks = element.tracks;
}
private int getTrackIndex(Format format) {
TrackElement[] tracks = currentManifest.streamElements[streamElementIndex].tracks;
for (int i = 0; i < tracks.length; i++) { for (int i = 0; i < tracks.length; i++) {
if (tracks[i].format.equals(format)) { if (tracks[i].format.equals(format)) {
return i; return i;
...@@ -388,31 +475,6 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -388,31 +475,6 @@ public class SmoothStreamingChunkSource implements ChunkSource {
throw new IllegalStateException("Invalid format: " + format); throw new IllegalStateException("Invalid format: " + format);
} }
private static MediaFormat getMediaFormat(StreamElement streamElement, int trackIndex) {
TrackElement trackElement = streamElement.tracks[trackIndex];
Format trackFormat = trackElement.format;
String mimeType = trackFormat.mimeType;
if (streamElement.type == StreamElement.TYPE_VIDEO) {
MediaFormat format = MediaFormat.createVideoFormat(mimeType, MediaFormat.NO_VALUE,
trackFormat.width, trackFormat.height, Arrays.asList(trackElement.csd));
return format;
} else if (streamElement.type == StreamElement.TYPE_AUDIO) {
List<byte[]> csd;
if (trackElement.csd != null) {
csd = Arrays.asList(trackElement.csd);
} else {
csd = Collections.singletonList(CodecSpecificDataUtil.buildAacAudioSpecificConfig(
trackFormat.audioSamplingRate, trackFormat.audioChannels));
}
MediaFormat format = MediaFormat.createAudioFormat(mimeType, MediaFormat.NO_VALUE,
trackFormat.audioChannels, trackFormat.audioSamplingRate, csd);
return format;
} else if (streamElement.type == StreamElement.TYPE_TEXT) {
return MediaFormat.createTextFormat(trackFormat.mimeType, trackFormat.language);
}
return null;
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey, private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
ChunkExtractorWrapper extractorWrapper, DrmInitData drmInitData, DataSource dataSource, ChunkExtractorWrapper extractorWrapper, DrmInitData drmInitData, DataSource dataSource,
int chunkIndex, boolean isLast, long chunkStartTimeUs, long chunkEndTimeUs, int chunkIndex, boolean isLast, long chunkStartTimeUs, long chunkEndTimeUs,
...@@ -426,7 +488,12 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -426,7 +488,12 @@ public class SmoothStreamingChunkSource implements ChunkSource {
drmInitData, true); drmInitData, true);
} }
private static byte[] getKeyId(byte[] initData) { private static int getManifestTrackKey(int elementIndex, int trackIndex) {
Assertions.checkState(elementIndex <= 65536 && trackIndex <= 65536);
return (elementIndex << 16) | trackIndex;
}
private static byte[] getProtectionElementKeyId(byte[] initData) {
StringBuilder initDataStringBuilder = new StringBuilder(); StringBuilder initDataStringBuilder = new StringBuilder();
for (int i = 0; i < initData.length; i += 2) { for (int i = 0; i < initData.length; i += 2) {
initDataStringBuilder.append((char) initData[i]); initDataStringBuilder.append((char) initData[i]);
...@@ -448,4 +515,45 @@ public class SmoothStreamingChunkSource implements ChunkSource { ...@@ -448,4 +515,45 @@ public class SmoothStreamingChunkSource implements ChunkSource {
data[secondPosition] = temp; data[secondPosition] = temp;
} }
// Private classes.
private static final class ExposedTrack {
public final MediaFormat format;
private final int elementIndex;
// Non-adaptive track variables.
private final Format fixedFormat;
// Adaptive track variables.
private final Format[] adaptiveFormats;
private final int adaptiveMaxWidth;
private final int adaptiveMaxHeight;
public ExposedTrack(MediaFormat format, int elementIndex, Format fixedFormat) {
this.format = format;
this.elementIndex = elementIndex;
this.fixedFormat = fixedFormat;
this.adaptiveFormats = null;
this.adaptiveMaxWidth = -1;
this.adaptiveMaxHeight = -1;
}
public ExposedTrack(MediaFormat format, int elementIndex, Format[] adaptiveFormats,
int maxWidth, int maxHeight) {
this.format = format;
this.elementIndex = elementIndex;
this.adaptiveFormats = adaptiveFormats;
this.adaptiveMaxWidth = maxWidth;
this.adaptiveMaxHeight = maxHeight;
this.fixedFormat = null;
}
public boolean isAdaptive() {
return adaptiveFormats != null;
}
}
} }
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link SmoothStreamingManifest}.
 */
public interface SmoothStreamingTrackSelector {

  /**
   * Defines a selector output.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified tracks in the specified element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the adaptive tracks are located.
     * @param tracks The indices of the tracks within the element.
     */
    void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);

    /**
     * Outputs a fixed track corresponding to the specified track in the specified element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the track is located.
     * @param track The index of the track within the element.
     */
    void fixedTrack(SmoothStreamingManifest manifest, int element, int track);

  }

  /**
   * Outputs a track selection for a given manifest.
   *
   * @param manifest The manifest to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the manifest.
   */
  void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;

}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment