Commit be471489 by olly Committed by Oliver Woodman

ExoPlayer V2 Refactor - Step 4

Notes:
1. The logic in ExoPlayerImplInternal is very temporary, until we
   have proper TrackSelector implementations. Ignore the fact that
   it's crazy and has loads of nesting.
2. This change removes all capabilities checking. TrackRenderer
   implementations will be updated to perform these checks in a
   subsequent CL.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=113151233
parent 6cb20525
Showing with 256 additions and 695 deletions
......@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.DefaultDashTrackSelector;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
......@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newVideoInstance(context, true, false),
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
......@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_AUDIO,
audioDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
// Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_TEXT,
textDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
DemoPlayer.TYPE_TEXT);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
......
......@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.hls.DefaultHlsTrackSelector;
import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsPlaylist;
import com.google.android.exoplayer.hls.HlsPlaylistParser;
......@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder {
// Build the video/audio/metadata renderers.
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(true /* isMaster */, dataSource, url,
manifest, DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter,
timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsChunkSource chunkSource = new HlsChunkSource(HlsChunkSource.TYPE_DEFAULT, dataSource, url,
manifest, bandwidthMeter, timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
......
......@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.smoothstreaming.DefaultSmoothStreamingTrackSelector;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
......@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
SmoothStreamingManifest.StreamElement.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
......@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newAudioInstance(),
audioDataSource, null, LIVE_EDGE_LATENCY_MS);
SmoothStreamingManifest.StreamElement.TYPE_AUDIO, audioDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
// Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newTextInstance(),
textDataSource, null, LIVE_EDGE_LATENCY_MS);
SmoothStreamingManifest.StreamElement.TYPE_TEXT, textDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
// Invoke the callback.
player.onSource(
......
......@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
}
public void testGetAvailableRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, null,
null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000);
......@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
public void testGetAvailableRangeOnMultiPeriodVod() {
DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
AdaptationSet.TYPE_VIDEO, null, null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000);
}
......@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
}
public void testSegmentIndexInitializationOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class),
null);
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO,
mock(DataSource.class), null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
......@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
when(manifestFetcher.getManifest()).thenReturn(mpd);
DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), null,
AdaptationSet.TYPE_VIDEO, mock(DataSource.class), null,
new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS),
liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null,
0);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
return chunkSource;
}
......
......@@ -13,4 +13,4 @@
# Project target.
target=android-23
android.library=false
android.library.reference.1=../experimental
android.library.reference.1=../main
......@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private final long minBufferUs;
private final long minRebufferUs;
private final List<TrackRenderer> enabledRenderers;
private final int[][] trackIndices;
private final int[][] groupIndices;
private final int[][][] trackIndices;
private final int[] selectedTrackIndices;
private final Handler handler;
private final HandlerThread internalPlaybackThread;
......@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger;
standaloneMediaClock = new StandaloneMediaClock();
pendingSeekCount = new AtomicInteger();
enabledRenderers = new ArrayList<>(renderers.length);
trackIndices = new int[renderers.length][];
groupIndices = new int[renderers.length][];
trackIndices = new int[renderers.length][][];
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler",
......@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true;
// Establish the mapping from renderer to track index (trackIndices), and build a list of
// formats corresponding to each renderer (trackFormats).
int trackCount = source.getTrackCount();
boolean[] trackMappedFlags = new boolean[trackCount];
// The maximum number of tracks that one renderer can support is the total number of tracks in
// all groups, plus possibly one adaptive track per group.
int maxTrackCount = source.getTrackGroupCount();
for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
maxTrackCount += source.getTrackGroup(groupIndex).length;
}
// Construct tracks for each renderer.
MediaFormat[][] trackFormats = new MediaFormat[renderers.length][];
for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
TrackRenderer renderer = renderers[rendererIndex];
int rendererTrackCount = 0;
int[] rendererTrackIndices = new int[trackCount];
MediaFormat[] rendererTrackFormats = new MediaFormat[trackCount];
for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
MediaFormat trackFormat = source.getFormat(trackIndex);
if (!trackMappedFlags[trackIndex] && renderer.handlesTrack(trackFormat)) {
trackMappedFlags[trackIndex] = true;
rendererTrackIndices[rendererTrackCount] = trackIndex;
rendererTrackFormats[rendererTrackCount++] = trackFormat;
int[] rendererTrackGroups = new int[maxTrackCount];
int[][] rendererTrackIndices = new int[maxTrackCount][];
MediaFormat[] rendererTrackFormats = new MediaFormat[maxTrackCount];
for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
TrackGroup trackGroup = source.getTrackGroup(groupIndex);
// TODO[REFACTOR]: This should check that the renderer is capable of adaptive playback, in
// addition to checking that the group is adaptive.
if (trackGroup.adaptive) {
// Try and build an adaptive track.
int adaptiveTrackIndexCount = 0;
int[] adaptiveTrackIndices = new int[trackGroup.length];
MediaFormat adaptiveTrackFormat = null;
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
adaptiveTrackIndices[adaptiveTrackIndexCount++] = trackIndex;
if (adaptiveTrackFormat == null) {
adaptiveTrackFormat = trackFormat.copyAsAdaptive("auto");
}
}
}
if (adaptiveTrackIndexCount > 1) {
// We succeeded in building an adaptive track.
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] =
Arrays.copyOf(adaptiveTrackIndices, adaptiveTrackIndexCount);
rendererTrackFormats[rendererTrackCount++] = adaptiveTrackFormat;
}
}
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] = new int[] {trackIndex};
rendererTrackFormats[rendererTrackCount++] = trackFormat;
}
}
}
groupIndices[rendererIndex] = Arrays.copyOf(rendererTrackGroups, rendererTrackCount);
trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount);
trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount);
}
......@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger;
TrackRenderer renderer = renderers[rendererIndex];
int trackIndex = selectedTrackIndices[rendererIndex];
if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) {
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex];
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs);
TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, false);
enabledRenderers.add(renderer);
allRenderersEnded = allRenderersEnded && renderer.isEnded();
......@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean playing = playWhenReady && state == ExoPlayer.STATE_READY;
// Consider as joining if the renderer was previously disabled, but not when switching tracks.
boolean joining = !isEnabled && playing;
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex];
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs);
TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, joining);
enabledRenderers.add(renderer);
if (playing) {
......
......@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource {
private final long fileDescriptorOffset;
private final long fileDescriptorLength;
private MediaExtractor extractor;
private MediaFormat[] trackFormats;
private boolean prepared;
private long durationUs;
private int enabledTrackCount;
private MediaExtractor extractor;
private TrackGroup[] tracks;
private int[] trackStates;
private boolean[] pendingResets;
private int enabledTrackCount;
private long lastSeekPositionUs;
private long pendingSeekPositionUs;
......@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource {
durationUs = C.UNKNOWN_TIME_US;
trackStates = new int[extractor.getTrackCount()];
pendingResets = new boolean[trackStates.length];
trackFormats = new MediaFormat[trackStates.length];
tracks = new TrackGroup[trackStates.length];
for (int i = 0; i < trackStates.length; i++) {
trackFormats[i] = createMediaFormat(extractor.getTrackFormat(i));
long trackDurationUs = trackFormats[i].durationUs;
MediaFormat format = createMediaFormat(extractor.getTrackFormat(i));
tracks[i] = new TrackGroup(format);
long trackDurationUs = format.durationUs;
if (trackDurationUs > durationUs) {
durationUs = trackDurationUs;
}
......@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return trackStates.length;
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return trackFormats[track];
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
......@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] track, long positionUs) {
Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED);
Assertions.checkState(trackStates[group] == TRACK_STATE_DISABLED);
enabledTrackCount++;
trackStates[track] = TRACK_STATE_ENABLED;
extractor.selectTrack(track);
trackStates[group] = TRACK_STATE_ENABLED;
extractor.selectTrack(group);
seekToUsInternal(positionUs, positionUs != 0);
return new TrackStreamImpl(track);
return new TrackStreamImpl(group);
}
/* package */ long readReset(int track) {
......@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource {
return TrackStream.NOTHING_READ;
}
if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
formatHolder.format = trackFormats[track];
formatHolder.format = tracks[track].getFormat(0);
formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
trackStates[track] = TRACK_STATE_FORMAT_SENT;
return TrackStream.FORMAT_READ;
......
......@@ -28,7 +28,6 @@ import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import java.io.IOException;
import java.util.HashMap;
/**
......@@ -43,8 +42,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart).
*/
// TODO[REFACTOR]: Shouldn't implement IOException.
public static class DecoderQueryException extends IOException {
public static class DecoderQueryException extends Exception {
private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause);
......
......@@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer;
import android.util.Pair;
import java.io.IOException;
/**
......@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource {
private boolean prepared;
private long durationUs;
private SampleSource[] trackSources;
private int[] trackIndices;
private TrackGroup[] tracks;
public MultiSampleSource(SampleSource... sources) {
this.sources = sources;
......@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource {
if (prepared) {
this.prepared = true;
this.durationUs = C.UNKNOWN_TIME_US;
int trackCount = 0;
int totalTrackGroupCount = 0;
for (int i = 0; i < sources.length; i++) {
trackCount += sources[i].getTrackCount();
totalTrackGroupCount += sources[i].getTrackGroupCount();
if (sources[i].getDurationUs() > durationUs) {
durationUs = sources[i].getDurationUs();
}
}
trackSources = new SampleSource[trackCount];
trackIndices = new int[trackCount];
int index = 0;
tracks = new TrackGroup[totalTrackGroupCount];
int trackGroupIndex = 0;
for (int i = 0; i < sources.length; i++) {
int thisSourceTrackCount = sources[i].getTrackCount();
for (int j = 0; j < thisSourceTrackCount; j++) {
trackSources[index] = sources[i];
trackIndices[index++] = j;
int sourceTrackGroupCount = sources[i].getTrackGroupCount();
for (int j = 0; j < sourceTrackGroupCount; j++) {
tracks[trackGroupIndex++] = sources[i].getTrackGroup(j);
}
}
}
......@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource {
}
@Override
public int getTrackCount() {
return trackSources.length;
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
return trackSources[track].getFormat(trackIndices[track]);
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
public TrackStream enable(int track, long positionUs) {
return trackSources[track].enable(trackIndices[track], positionUs);
public TrackStream enable(int group, int[] tracks, long positionUs) {
Pair<Integer, Integer> sourceAndGroup = getSourceAndTrackGroupIndices(group);
return sources[sourceAndGroup.first].enable(sourceAndGroup.second, tracks, positionUs);
}
@Override
......@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource {
prepared = false;
}
private Pair<Integer, Integer> getSourceAndTrackGroupIndices(int group) {
  // Walk the sources, tracking the index of each source's first group in the
  // combined numbering, until the requested group falls inside a source.
  int firstGroupInSource = 0;
  for (int sourceIndex = 0; sourceIndex < sources.length; sourceIndex++) {
    int groupCount = sources[sourceIndex].getTrackGroupCount();
    if (group - firstGroupInSource < groupCount) {
      // Found: return the source index and the group index local to that source.
      return Pair.create(sourceIndex, group - firstGroupInSource);
    }
    firstGroupInSource += groupCount;
  }
  // The requested group index exceeds the total number of groups.
  throw new IndexOutOfBoundsException();
}
}
......@@ -19,10 +19,6 @@ import java.io.IOException;
/**
* A source of media.
* <p>
* A {@link SampleSource} may expose one or multiple tracks. The number of tracks and each track's
* media format can be queried using {@link #getTrackCount()} and {@link #getFormat(int)}
* respectively.
*/
public interface SampleSource {
......@@ -56,30 +52,23 @@ public interface SampleSource {
long getDurationUs();
/**
* Returns the number of tracks exposed by the source.
* Returns the number of track groups exposed by the source.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The number of tracks.
* @return The number of track groups exposed by the source.
*/
int getTrackCount();
public int getTrackGroupCount();
/**
* Returns the format of the specified track.
* <p>
* Note that whilst the format of a track will remain constant, the format of the actual media
* stream may change dynamically. An example of this is where the track is adaptive (i.e.
* {@link MediaFormat#adaptive} is true). Hence the track formats returned through this method
* should not be used to configure decoders. Decoder configuration should be performed using the
* formats obtained when reading the media stream through calls to
* {@link TrackStream#readData(MediaFormatHolder, SampleHolder)}.
* Returns the {@link TrackGroup} at the specified index.
* <p>
* This method should only be called after the source has been prepared.
*
* @param track The track index.
* @return The format of the specified track.
* @param group The group index.
* @return The corresponding {@link TrackGroup}.
*/
MediaFormat getFormat(int track);
public TrackGroup getTrackGroup(int group);
/**
* Indicates to the source that it should continue buffering data for its enabled tracks.
......@@ -112,17 +101,19 @@ public interface SampleSource {
void seekToUs(long positionUs);
/**
* Enables the specified track. Returning a {@link TrackStream} from which the track's data can
* be read.
* Enables the specified group to read the specified tracks. A {@link TrackStream} is returned
* through which the enabled track's data can be read.
* <p>
* This method should only be called after the source has been prepared, and when the specified
* track is disabled.
* group is disabled. Note that {@code tracks.length} is only permitted to be greater than one
* if {@link TrackGroup#adaptive} is true for the group.
*
* @param track The track to enable.
* @param group The group index.
* @param tracks The track indices.
* @param positionUs The current playback position in microseconds.
* @return A {@link TrackStream} from which the enabled track's data can be read.
*/
TrackStream enable(int track, long positionUs);
public TrackStream enable(int group, int[] tracks, long positionUs);
/**
* Releases the source.
......
......@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
private final DataSource dataSource;
private final MediaFormat format;
private final int minLoadableRetryCount;
private final TrackGroup tracks;
private int state;
private byte[] sampleData;
......@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
this.dataSource = dataSource;
this.format = format;
this.minLoadableRetryCount = minLoadableRetryCount;
tracks = new TrackGroup(format);
sampleData = new byte[INITIAL_SAMPLE_SIZE];
}
......@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
}
@Override
public int getTrackCount() {
public int getTrackGroupCount() {
return 1;
}
@Override
public MediaFormat getFormat(int track) {
return format;
public TrackGroup getTrackGroup(int group) {
return tracks;
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
state = STATE_SEND_FORMAT;
clearCurrentLoadableException();
maybeStartLoading();
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.SampleSource.TrackStream;
/**
* Defines a group of tracks exposed by a {@link SampleSource}.
* <p>
* A {@link SampleSource} is only able to provide one {@link TrackStream} corresponding to a group
* at any given time. If {@link #adaptive} is true this {@link TrackStream} can adapt between
* multiple tracks within the group. If {@link #adaptive} is false then it's only possible to
* consume one track from the group at a given time.
*/
public final class TrackGroup {

  /**
   * The number of tracks in the group.
   */
  public final int length;

  /**
   * Whether it's possible to adapt between multiple tracks in the group.
   */
  public final boolean adaptive;

  // Defensively copied in the constructor so external mutation of the caller's
  // array cannot change the group after construction.
  private final MediaFormat[] formats;

  /**
   * Constructs a group containing a single track.
   *
   * @param format The format of the single track.
   */
  public TrackGroup(MediaFormat format) {
    this(false, format);
  }

  /**
   * Constructs a group containing the specified tracks.
   *
   * @param supportsAdaptive Whether it's possible to adapt between multiple tracks in the group.
   * @param formats The track formats.
   */
  public TrackGroup(boolean supportsAdaptive, MediaFormat... formats) {
    this.adaptive = supportsAdaptive;
    // Clone the varargs array: the caller may retain a reference to it, and this
    // class advertises immutable state via its public final fields.
    this.formats = formats.clone();
    length = this.formats.length;
  }

  /**
   * Gets the format of the track at a given index.
   *
   * @param index The index of the track, in the range [0, {@link #length}).
   * @return The track's format.
   */
  public MediaFormat getFormat(int index) {
    return formats[index];
  }

}
......@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSource.TrackStream;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.extractor.DefaultTrackOutput;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
......@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
return false;
}
durationUs = C.UNKNOWN_TIME_US;
if (chunkSource.getTrackCount() > 0) {
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType);
durationUs = chunkSource.getFormat(0).durationUs;
TrackGroup trackGroup = chunkSource.getTracks();
if (trackGroup.length > 0) {
MediaFormat firstTrackFormat = trackGroup.getFormat(0);
loader = new Loader("Loader:" + firstTrackFormat.mimeType);
durationUs = firstTrackFormat.durationUs;
}
state = STATE_PREPARED;
return true;
......@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
}
@Override
public int getTrackCount() {
Assertions.checkState(state != STATE_IDLE);
return chunkSource.getTrackCount();
public int getTrackGroupCount() {
return 1;
}
@Override
public MediaFormat getFormat(int track) {
public TrackGroup getTrackGroup(int group) {
Assertions.checkState(state != STATE_IDLE);
return chunkSource.getFormat(track);
return chunkSource.getTracks();
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(state == STATE_PREPARED);
Assertions.checkState(enabledTrackCount++ == 0);
state = STATE_ENABLED;
chunkSource.enable(track);
chunkSource.enable(tracks);
loadControl.register(this, bufferSizeContribution);
downstreamFormat = null;
downstreamMediaFormat = null;
......
......@@ -15,7 +15,7 @@
*/
package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import java.io.IOException;
import java.util.List;
......@@ -48,33 +48,23 @@ public interface ChunkSource {
boolean prepare();
/**
* Returns the number of tracks exposed by the source.
* Gets the group of tracks provided by the source.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The number of tracks.
* @return The track group.
*/
int getTrackCount();
TrackGroup getTracks();
/**
* Gets the format of the specified track.
* Enable the source for the specified tracks.
* <p>
* This method should only be called after the source has been prepared.
*
* @param track The track index.
* @return The format of the track.
*/
MediaFormat getFormat(int track);
/**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* This method should only be called after the source has been prepared and when the source is
* disabled.
*
* @param track The track index.
* @param tracks The track indices.
*/
void enable(int track);
void enable(int[] tracks);
/**
* Indicates to the source that it should still be checking for updates to the stream.
......
......@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
......@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private final DataSpec dataSpec;
private final Format format;
private final long durationUs;
private final MediaFormat mediaFormat;
private final TrackGroup tracks;
/**
* @param dataSource A {@link DataSource} suitable for loading the sample data.
......@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
this.dataSpec = dataSpec;
this.format = format;
this.durationUs = durationUs;
this.mediaFormat = mediaFormat;
tracks = new TrackGroup(mediaFormat);
}
@Override
......@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource {
}
@Override
public int getTrackCount() {
return 1;
public TrackGroup getTracks() {
return tracks;
}
@Override
public MediaFormat getFormat(int track) {
return mediaFormat;
}
@Override
public void enable(int track) {
public void enable(int[] tracks) {
// Do nothing.
}
......@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private SingleSampleMediaChunk initChunk() {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0,
durationUs, 0, mediaFormat, null, Chunk.NO_PARENT_ID);
durationUs, 0, tracks.getFormat(0), null, Chunk.NO_PARENT_ID);
}
}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link Period} of a media presentation description.
 */
public interface DashTrackSelector {

  /**
   * Defines a selector output. Implementations receive the tracks chosen by the selector.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified representations in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the representations
     *     are located.
     * @param representationIndices The indices of the representations within the adaptation set.
     */
    void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
        int adaptationSetIndex, int[] representationIndices);

    /**
     * Outputs a fixed track corresponding to the specified representation in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the track is located.
     * @param representationIndex The index of the representation within the adaptation set.
     */
    void fixedTrack(MediaPresentationDescription manifest, int periodIndex, int adaptationSetIndex,
        int representationIndex);

  }

  /**
   * Outputs a track selection for a given period.
   *
   * @param manifest The media presentation description to process.
   * @param periodIndex The index of the period to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the period.
   */
  void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
/**
 * A default {@link DashTrackSelector} implementation.
 * <p>
 * Emits tracks for a single adaptation set type (video, audio or text). For video, an adaptive
 * track spanning the selected representations is emitted (when more than one is selected) in
 * addition to a fixed track per representation. For other types, only fixed tracks are emitted.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultDashTrackSelector implements DashTrackSelector {

  private final int adaptationSetType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video adaptation sets.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newAudioInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newTextInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
  }

  private DefaultDashTrackSelector(int adaptationSetType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.adaptationSetType = adaptationSetType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException {
    // Walk the period's adaptation sets, emitting tracks for those matching the target type.
    Period period = manifest.getPeriod(periodIndex);
    int adaptationSetCount = period.adaptationSets.size();
    for (int setIndex = 0; setIndex < adaptationSetCount; setIndex++) {
      AdaptationSet adaptationSet = period.adaptationSets.get(setIndex);
      if (adaptationSet.type != adaptationSetType) {
        continue;
      }
      if (adaptationSetType != AdaptationSet.TYPE_VIDEO) {
        // Non-video types: expose every representation as a fixed track.
        int representationCount = adaptationSet.representations.size();
        for (int representationIndex = 0; representationIndex < representationCount;
            representationIndex++) {
          output.fixedTrack(manifest, periodIndex, setIndex, representationIndex);
        }
        continue;
      }
      // Video: optionally restrict to the representations playable on this device.
      int[] representationIndices = filterVideoRepresentations
          ? VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
              context, adaptationSet.representations, null,
              filterProtectedHdContent && adaptationSet.hasContentProtection())
          : Util.firstIntegersArray(adaptationSet.representations.size());
      // An adaptive track only makes sense when there's more than one representation.
      if (representationIndices.length > 1) {
        output.adaptiveTrack(manifest, periodIndex, setIndex, representationIndices);
      }
      for (int representationIndex : representationIndices) {
        output.fixedTrack(manifest, periodIndex, setIndex, representationIndex);
      }
    }
  }

}
......@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DataSource;
......@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
private boolean prepared;
private int enabledTrackCount;
private MediaFormat[] mediaFormats;
private TrackGroup[] tracks;
private long durationUs;
private boolean[] pendingMediaFormat;
private boolean[] pendingResets;
......@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) {
int trackCount = sampleQueues.size();
tracks = new TrackGroup[trackCount];
trackEnabledStates = new boolean[trackCount];
pendingResets = new boolean[trackCount];
pendingMediaFormat = new boolean[trackCount];
mediaFormats = new MediaFormat[trackCount];
durationUs = C.UNKNOWN_TIME_US;
for (int i = 0; i < trackCount; i++) {
MediaFormat format = sampleQueues.valueAt(i).getFormat();
mediaFormats[i] = format;
tracks[i] = new TrackGroup(format);
if (format.durationUs > durationUs) {
durationUs = format.durationUs;
}
......@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
}
@Override
public int getTrackCount() {
return sampleQueues.size();
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return mediaFormats[track];
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared);
Assertions.checkState(!trackEnabledStates[track]);
Assertions.checkState(!trackEnabledStates[group]);
enabledTrackCount++;
trackEnabledStates[track] = true;
pendingMediaFormat[track] = true;
pendingResets[track] = false;
trackEnabledStates[group] = true;
pendingMediaFormat[group] = true;
pendingResets[group] = false;
if (enabledTrackCount == 1) {
// Treat all enables in non-seekable media as being from t=0.
positionUs = !seekMap.isSeekable() ? 0 : positionUs;
......@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
lastSeekPositionUs = positionUs;
restartFrom(positionUs);
}
return new TrackStreamImpl(track);
return new TrackStreamImpl(group);
}
/* package */ void disable(int track) {
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import android.content.Context;
import android.text.TextUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * A default {@link HlsTrackSelector} implementation.
 * <p>
 * In default mode, variants are filtered by device display capabilities and then partitioned
 * using format height and codec information to prefer variants that contain video. In VTT mode,
 * each subtitle rendition declared in the playlist is output as a fixed track.
 */
public final class DefaultHlsTrackSelector implements HlsTrackSelector {

  private static final int TYPE_DEFAULT = 0;
  private static final int TYPE_VTT = 1;

  private final Context context;
  private final int type;

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects the streams defined in the playlist.
   *
   * @param context A context.
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newDefaultInstance(Context context) {
    return new DefaultHlsTrackSelector(context, TYPE_DEFAULT);
  }

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects subtitle renditions.
   *
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newVttInstance() {
    return new DefaultHlsTrackSelector(null, TYPE_VTT);
  }

  private DefaultHlsTrackSelector(Context context, int type) {
    this.context = context;
    this.type = type;
  }

  @Override
  public void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException {
    if (type == TYPE_VTT) {
      // VTT mode: output each declared subtitle rendition as a fixed track.
      List<Variant> subtitleVariants = playlist.subtitles;
      if (subtitleVariants != null) {
        for (Variant subtitleVariant : subtitleVariants) {
          output.fixedTrack(playlist, subtitleVariant);
        }
      }
      return;
    }

    // Default mode. Start from the variants playable on this device's display.
    int[] selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
        context, playlist.variants, null, false);
    ArrayList<Variant> selectedVariants = new ArrayList<>(selectedIndices.length);
    for (int selectedIndex : selectedIndices) {
      selectedVariants.add(playlist.variants.get(selectedIndex));
    }

    // Partition the selection by whether each variant definitely has video or is definitely
    // audio only, based on format height and explicit codec declarations.
    ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
    ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
    for (Variant variant : selectedVariants) {
      if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
        definiteVideoVariants.add(variant);
      } else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
        definiteAudioOnlyVariants.add(variant);
      }
    }

    if (!definiteVideoVariants.isEmpty()) {
      // We've identified some variants as definitely containing video. Assume variants within the
      // master playlist are marked consistently, and hence that we have the full set. Filter out
      // any other variants, which are likely to be audio only.
      selectedVariants = definiteVideoVariants;
    } else if (definiteAudioOnlyVariants.size() < selectedVariants.size()) {
      // We've identified some variants, but not all, as being audio only. Filter them out to leave
      // the remaining variants, which are likely to contain video.
      selectedVariants.removeAll(definiteAudioOnlyVariants);
    } else {
      // Leave the selected variants unchanged. They're likely either all video or all audio.
    }

    // An adaptive track only makes sense when more than one variant survives the filtering.
    if (selectedVariants.size() > 1) {
      Variant[] adaptiveVariants = new Variant[selectedVariants.size()];
      selectedVariants.toArray(adaptiveVariants);
      output.adaptiveTrack(playlist, adaptiveVariants);
    }
    for (Variant variant : selectedVariants) {
      output.fixedTrack(playlist, variant);
    }
  }

  /**
   * Returns whether the variant's codecs attribute explicitly declares a codec whose name starts
   * with the given prefix.
   */
  private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
    String codecs = variant.format.codecs;
    if (TextUtils.isEmpty(codecs)) {
      return false;
    }
    for (String codec : codecs.split("(\\s*,\\s*)|(\\s*$)")) {
      if (codec.startsWith(prefix)) {
        return true;
      }
    }
    return false;
  }

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import java.io.IOException;
/**
 * Specifies a track selection from an {@link HlsMasterPlaylist}.
 */
public interface HlsTrackSelector {

  /**
   * Defines a selector output. Implementations receive the tracks chosen by the selector.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified variants of the master playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variants The variants to use for the adaptive track.
     */
    void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants);

    /**
     * Outputs a fixed track corresponding to the specified variant of the master playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variant The variant to use for the track.
     */
    void fixedTrack(HlsMasterPlaylist playlist, Variant variant);

  }

  /**
   * Outputs a track selection for a given master playlist.
   *
   * @param playlist The master playlist to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the playlist.
   */
  void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException;

}
}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
import java.util.Arrays;
/**
 * A default {@link SmoothStreamingTrackSelector} implementation.
 * <p>
 * Emits tracks for a single stream element type (video, audio or text). For video, an adaptive
 * track spanning the selected tracks is emitted (when more than one is selected) in addition to a
 * fixed track per selected track. For other types, only fixed tracks are emitted.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultSmoothStreamingTrackSelector implements SmoothStreamingTrackSelector {

  private final int streamElementType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video stream elements.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio stream elements.
   *
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newAudioInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text stream elements.
   *
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newTextInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_TEXT, null, false, false);
  }

  private DefaultSmoothStreamingTrackSelector(int streamElementType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.streamElementType = streamElementType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
    // Walk the stream elements, emitting tracks for those matching the target type.
    for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
      if (manifest.streamElements[elementIndex].type != streamElementType) {
        continue;
      }
      TrackElement[] tracks = manifest.streamElements[elementIndex].tracks;
      if (streamElementType != StreamElement.TYPE_VIDEO) {
        // Non-video types: expose every track as a fixed track.
        for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
        continue;
      }
      // Video: optionally restrict to the tracks playable on this device.
      int[] trackIndices = filterVideoRepresentations
          ? VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
              context, Arrays.asList(tracks), null,
              filterProtectedHdContent && manifest.protectionElement != null)
          : Util.firstIntegersArray(tracks.length);
      // An adaptive track only makes sense when there's more than one track.
      if (trackIndices.length > 1) {
        output.adaptiveTrack(manifest, elementIndex, trackIndices);
      }
      for (int trackIndex : trackIndices) {
        output.fixedTrack(manifest, elementIndex, trackIndex);
      }
    }
  }

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link SmoothStreamingManifest}.
 */
public interface SmoothStreamingTrackSelector {

  /**
   * Defines a selector output. Implementations receive the tracks chosen by the selector.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified tracks in the specified stream element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the adaptive tracks are located.
     * @param tracks The indices of the tracks within the element.
     */
    void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);

    /**
     * Outputs a fixed track corresponding to the specified track in the specified stream element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the track is located.
     * @param track The index of the track within the element.
     */
    void fixedTrack(SmoothStreamingManifest manifest, int element, int track);

  }

  /**
   * Outputs a track selection for a given manifest.
   *
   * @param manifest The manifest to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the manifest.
   */
  void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;

}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment