Commit be471489 by olly Committed by Oliver Woodman

ExoPlayer V2 Refactor - Step 4

Notes:
1. The logic in ExoPlayerImplInternal is very temporary, until we
   have proper TrackSelector implementations. Ignore the fact that
   it's crazy and has loads of nesting.
2. This change removes all capabilities checking. TrackRenderer
   implementations will be updated to perform these checks in a
   subsequent CL.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=113151233
parent 6cb20525
Showing with 683 additions and 1243 deletions
...@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource; ...@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource; import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.DefaultDashTrackSelector; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.UtcTimingElement; import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
...@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder { ...@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the video renderer. // Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_VIDEO,
DefaultDashTrackSelector.newVideoInstance(context, true, false),
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO); elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
...@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder { ...@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the audio renderer. // Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_AUDIO,
DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS, audioDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO); DemoPlayer.TYPE_AUDIO);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO); DemoPlayer.TYPE_AUDIO);
// Build the text renderer. // Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_TEXT,
DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS, textDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT); DemoPlayer.TYPE_TEXT);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl, ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT); DemoPlayer.TYPE_TEXT);
......
...@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player; ...@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl; import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl; import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder; import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.hls.DefaultHlsTrackSelector;
import com.google.android.exoplayer.hls.HlsChunkSource; import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsPlaylist; import com.google.android.exoplayer.hls.HlsPlaylist;
import com.google.android.exoplayer.hls.HlsPlaylistParser; import com.google.android.exoplayer.hls.HlsPlaylistParser;
...@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder { ...@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder {
// Build the video/audio/metadata renderers. // Build the video/audio/metadata renderers.
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(true /* isMaster */, dataSource, url, HlsChunkSource chunkSource = new HlsChunkSource(HlsChunkSource.TYPE_DEFAULT, dataSource, url,
manifest, DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter, manifest, bandwidthMeter, timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl, HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO); MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
......
...@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource; ...@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder; import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.drm.MediaDrmCallback; import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.smoothstreaming.DefaultSmoothStreamingTrackSelector;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
...@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder { ...@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the video renderer. // Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false), SmoothStreamingManifest.StreamElement.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS); videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
...@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder { ...@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the audio renderer. // Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newAudioInstance(), SmoothStreamingManifest.StreamElement.TYPE_AUDIO, audioDataSource, null,
audioDataSource, null, LIVE_EDGE_LATENCY_MS); LIVE_EDGE_LATENCY_MS);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
DemoPlayer.TYPE_AUDIO);
// Build the text renderer. // Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newTextInstance(), SmoothStreamingManifest.StreamElement.TYPE_TEXT, textDataSource, null,
textDataSource, null, LIVE_EDGE_LATENCY_MS); LIVE_EDGE_LATENCY_MS);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl, ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
DemoPlayer.TYPE_TEXT);
// Invoke the callback. // Invoke the callback.
player.onSource( player.onSource(
......
...@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testGetAvailableRangeOnVod() { public void testGetAvailableRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, null,
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null); null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000); checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000);
...@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
public void testGetAvailableRangeOnMultiPeriodVod() { public void testGetAvailableRangeOnMultiPeriodVod() {
DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null); AdaptationSet.TYPE_VIDEO, null, null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000); checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000);
} }
...@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testSegmentIndexInitializationOnVod() { public void testSegmentIndexInitializationOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), mock(DataSource.class), null);
null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
List<MediaChunk> queue = new ArrayList<>(); List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder(); ChunkOperationHolder out = new ChunkOperationHolder();
...@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class); ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
when(manifestFetcher.getManifest()).thenReturn(mpd); when(manifestFetcher.getManifest()).thenReturn(mpd);
DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd, DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), null, AdaptationSet.TYPE_VIDEO, mock(DataSource.class), null,
new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS), new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS),
liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null, liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null,
0); 0);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
return chunkSource; return chunkSource;
} }
......
...@@ -13,4 +13,4 @@ ...@@ -13,4 +13,4 @@
# Project target. # Project target.
target=android-23 target=android-23
android.library=false android.library=false
android.library.reference.1=../experimental android.library.reference.1=../main
...@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private final long minBufferUs; private final long minBufferUs;
private final long minRebufferUs; private final long minRebufferUs;
private final List<TrackRenderer> enabledRenderers; private final List<TrackRenderer> enabledRenderers;
private final int[][] trackIndices; private final int[][] groupIndices;
private final int[][][] trackIndices;
private final int[] selectedTrackIndices; private final int[] selectedTrackIndices;
private final Handler handler; private final Handler handler;
private final HandlerThread internalPlaybackThread; private final HandlerThread internalPlaybackThread;
...@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger;
standaloneMediaClock = new StandaloneMediaClock(); standaloneMediaClock = new StandaloneMediaClock();
pendingSeekCount = new AtomicInteger(); pendingSeekCount = new AtomicInteger();
enabledRenderers = new ArrayList<>(renderers.length); enabledRenderers = new ArrayList<>(renderers.length);
trackIndices = new int[renderers.length][]; groupIndices = new int[renderers.length][];
trackIndices = new int[renderers.length][][];
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can // Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect. // not normally change to this priority" is incorrect.
internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler", internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler",
...@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean allRenderersEnded = true; boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true; boolean allRenderersReadyOrEnded = true;
// Establish the mapping from renderer to track index (trackIndices), and build a list of // The maximum number of tracks that one renderer can support is the total number of tracks in
// formats corresponding to each renderer (trackFormats). // all groups, plus possibly one adaptive track per group.
int trackCount = source.getTrackCount(); int maxTrackCount = source.getTrackGroupCount();
boolean[] trackMappedFlags = new boolean[trackCount]; for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
maxTrackCount += source.getTrackGroup(groupIndex).length;
}
// Construct tracks for each renderer.
MediaFormat[][] trackFormats = new MediaFormat[renderers.length][]; MediaFormat[][] trackFormats = new MediaFormat[renderers.length][];
for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) { for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
TrackRenderer renderer = renderers[rendererIndex]; TrackRenderer renderer = renderers[rendererIndex];
int rendererTrackCount = 0; int rendererTrackCount = 0;
int[] rendererTrackIndices = new int[trackCount]; int[] rendererTrackGroups = new int[maxTrackCount];
MediaFormat[] rendererTrackFormats = new MediaFormat[trackCount]; int[][] rendererTrackIndices = new int[maxTrackCount][];
for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) { MediaFormat[] rendererTrackFormats = new MediaFormat[maxTrackCount];
MediaFormat trackFormat = source.getFormat(trackIndex); for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
if (!trackMappedFlags[trackIndex] && renderer.handlesTrack(trackFormat)) { TrackGroup trackGroup = source.getTrackGroup(groupIndex);
trackMappedFlags[trackIndex] = true; // TODO[REFACTOR]: This should check that the renderer is capable of adaptive playback, in
rendererTrackIndices[rendererTrackCount] = trackIndex; // addition to checking that the group is adaptive.
rendererTrackFormats[rendererTrackCount++] = trackFormat; if (trackGroup.adaptive) {
// Try and build an adaptive track.
int adaptiveTrackIndexCount = 0;
int[] adaptiveTrackIndices = new int[trackGroup.length];
MediaFormat adaptiveTrackFormat = null;
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
adaptiveTrackIndices[adaptiveTrackIndexCount++] = trackIndex;
if (adaptiveTrackFormat == null) {
adaptiveTrackFormat = trackFormat.copyAsAdaptive("auto");
}
}
}
if (adaptiveTrackIndexCount > 1) {
// We succeeded in building an adaptive track.
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] =
Arrays.copyOf(adaptiveTrackIndices, adaptiveTrackIndexCount);
rendererTrackFormats[rendererTrackCount++] = adaptiveTrackFormat;
}
}
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] = new int[] {trackIndex};
rendererTrackFormats[rendererTrackCount++] = trackFormat;
}
} }
} }
groupIndices[rendererIndex] = Arrays.copyOf(rendererTrackGroups, rendererTrackCount);
trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount); trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount);
trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount); trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount);
} }
...@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger;
TrackRenderer renderer = renderers[rendererIndex]; TrackRenderer renderer = renderers[rendererIndex];
int trackIndex = selectedTrackIndices[rendererIndex]; int trackIndex = selectedTrackIndices[rendererIndex];
if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) { if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) {
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex]; TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs); trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, false); renderer.enable(trackStream, positionUs, false);
enabledRenderers.add(renderer); enabledRenderers.add(renderer);
allRenderersEnded = allRenderersEnded && renderer.isEnded(); allRenderersEnded = allRenderersEnded && renderer.isEnded();
...@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean playing = playWhenReady && state == ExoPlayer.STATE_READY; boolean playing = playWhenReady && state == ExoPlayer.STATE_READY;
// Consider as joining if the renderer was previously disabled, but not when switching tracks. // Consider as joining if the renderer was previously disabled, but not when switching tracks.
boolean joining = !isEnabled && playing; boolean joining = !isEnabled && playing;
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex]; TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs); trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, joining); renderer.enable(trackStream, positionUs, joining);
enabledRenderers.add(renderer); enabledRenderers.add(renderer);
if (playing) { if (playing) {
......
...@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource {
private final long fileDescriptorOffset; private final long fileDescriptorOffset;
private final long fileDescriptorLength; private final long fileDescriptorLength;
private MediaExtractor extractor;
private MediaFormat[] trackFormats;
private boolean prepared; private boolean prepared;
private long durationUs; private long durationUs;
private int enabledTrackCount; private MediaExtractor extractor;
private TrackGroup[] tracks;
private int[] trackStates; private int[] trackStates;
private boolean[] pendingResets; private boolean[] pendingResets;
private int enabledTrackCount;
private long lastSeekPositionUs; private long lastSeekPositionUs;
private long pendingSeekPositionUs; private long pendingSeekPositionUs;
...@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource {
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
trackStates = new int[extractor.getTrackCount()]; trackStates = new int[extractor.getTrackCount()];
pendingResets = new boolean[trackStates.length]; pendingResets = new boolean[trackStates.length];
trackFormats = new MediaFormat[trackStates.length]; tracks = new TrackGroup[trackStates.length];
for (int i = 0; i < trackStates.length; i++) { for (int i = 0; i < trackStates.length; i++) {
trackFormats[i] = createMediaFormat(extractor.getTrackFormat(i)); MediaFormat format = createMediaFormat(extractor.getTrackFormat(i));
long trackDurationUs = trackFormats[i].durationUs; tracks[i] = new TrackGroup(format);
long trackDurationUs = format.durationUs;
if (trackDurationUs > durationUs) { if (trackDurationUs > durationUs) {
durationUs = trackDurationUs; durationUs = trackDurationUs;
} }
...@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource {
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
Assertions.checkState(prepared); return tracks.length;
return trackStates.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared); return tracks[group];
return trackFormats[track];
} }
@Override @Override
...@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource {
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] track, long positionUs) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED); Assertions.checkState(trackStates[group] == TRACK_STATE_DISABLED);
enabledTrackCount++; enabledTrackCount++;
trackStates[track] = TRACK_STATE_ENABLED; trackStates[group] = TRACK_STATE_ENABLED;
extractor.selectTrack(track); extractor.selectTrack(group);
seekToUsInternal(positionUs, positionUs != 0); seekToUsInternal(positionUs, positionUs != 0);
return new TrackStreamImpl(track); return new TrackStreamImpl(group);
} }
/* package */ long readReset(int track) { /* package */ long readReset(int track) {
...@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource {
return TrackStream.NOTHING_READ; return TrackStream.NOTHING_READ;
} }
if (trackStates[track] != TRACK_STATE_FORMAT_SENT) { if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
formatHolder.format = trackFormats[track]; formatHolder.format = tracks[track].getFormat(0);
formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null; formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
trackStates[track] = TRACK_STATE_FORMAT_SENT; trackStates[track] = TRACK_STATE_FORMAT_SENT;
return TrackStream.FORMAT_READ; return TrackStream.FORMAT_READ;
......
...@@ -28,7 +28,6 @@ import android.text.TextUtils; ...@@ -28,7 +28,6 @@ import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import android.util.Pair; import android.util.Pair;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
/** /**
...@@ -43,8 +42,7 @@ public final class MediaCodecUtil { ...@@ -43,8 +42,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the * Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart). * mediaserver process has crashed and is yet to restart).
*/ */
// TODO[REFACTOR]: Shouldn't implement IOException. public static class DecoderQueryException extends Exception {
public static class DecoderQueryException extends IOException {
private DecoderQueryException(Throwable cause) { private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause); super("Failed to query underlying media codecs", cause);
......
...@@ -15,6 +15,8 @@ ...@@ -15,6 +15,8 @@
*/ */
package com.google.android.exoplayer; package com.google.android.exoplayer;
import android.util.Pair;
import java.io.IOException; import java.io.IOException;
/** /**
...@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource { ...@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource {
private boolean prepared; private boolean prepared;
private long durationUs; private long durationUs;
private SampleSource[] trackSources; private TrackGroup[] tracks;
private int[] trackIndices;
public MultiSampleSource(SampleSource... sources) { public MultiSampleSource(SampleSource... sources) {
this.sources = sources; this.sources = sources;
...@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource { ...@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource {
if (prepared) { if (prepared) {
this.prepared = true; this.prepared = true;
this.durationUs = C.UNKNOWN_TIME_US; this.durationUs = C.UNKNOWN_TIME_US;
int trackCount = 0; int totalTrackGroupCount = 0;
for (int i = 0; i < sources.length; i++) { for (int i = 0; i < sources.length; i++) {
trackCount += sources[i].getTrackCount(); totalTrackGroupCount += sources[i].getTrackGroupCount();
if (sources[i].getDurationUs() > durationUs) { if (sources[i].getDurationUs() > durationUs) {
durationUs = sources[i].getDurationUs(); durationUs = sources[i].getDurationUs();
} }
} }
trackSources = new SampleSource[trackCount]; tracks = new TrackGroup[totalTrackGroupCount];
trackIndices = new int[trackCount]; int trackGroupIndex = 0;
int index = 0;
for (int i = 0; i < sources.length; i++) { for (int i = 0; i < sources.length; i++) {
int thisSourceTrackCount = sources[i].getTrackCount(); int sourceTrackGroupCount = sources[i].getTrackGroupCount();
for (int j = 0; j < thisSourceTrackCount; j++) { for (int j = 0; j < sourceTrackGroupCount; j++) {
trackSources[index] = sources[i]; tracks[trackGroupIndex++] = sources[i].getTrackGroup(j);
trackIndices[index++] = j;
} }
} }
} }
...@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource { ...@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource {
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return trackSources.length; return tracks.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
return trackSources[track].getFormat(trackIndices[track]); return tracks[group];
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
return trackSources[track].enable(trackIndices[track], positionUs); Pair<Integer, Integer> sourceAndGroup = getSourceAndTrackGroupIndices(group);
return sources[sourceAndGroup.first].enable(sourceAndGroup.second, tracks, positionUs);
} }
@Override @Override
...@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource { ...@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource {
prepared = false; prepared = false;
} }
/**
 * Maps a flat track group index onto a (source index, group index within that source) pair,
 * where the flat index space is formed by concatenating the groups of each source in order.
 *
 * @param group The flat group index.
 * @return A pair holding the owning source's index and the group's index within that source.
 * @throws IndexOutOfBoundsException If {@code group} does not correspond to any source.
 */
private Pair<Integer, Integer> getSourceAndTrackGroupIndices(int group) {
  int remaining = group;
  for (int sourceIndex = 0; sourceIndex < sources.length; sourceIndex++) {
    int groupCountForSource = sources[sourceIndex].getTrackGroupCount();
    // Once the residual index falls inside this source's range, we've found the owner.
    if (remaining < groupCountForSource) {
      return Pair.create(sourceIndex, remaining);
    }
    remaining -= groupCountForSource;
  }
  throw new IndexOutOfBoundsException();
}
} }
...@@ -19,10 +19,6 @@ import java.io.IOException; ...@@ -19,10 +19,6 @@ import java.io.IOException;
/** /**
* A source of media. * A source of media.
* <p>
* A {@link SampleSource} may expose one or multiple tracks. The number of tracks and each track's
* media format can be queried using {@link #getTrackCount()} and {@link #getFormat(int)}
* respectively.
*/ */
public interface SampleSource { public interface SampleSource {
...@@ -56,30 +52,23 @@ public interface SampleSource { ...@@ -56,30 +52,23 @@ public interface SampleSource {
long getDurationUs(); long getDurationUs();
/** /**
* Returns the number of tracks exposed by the source. * Returns the number of track groups exposed by the source.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @return The number of tracks. * @return The number of track groups exposed by the source.
*/ */
int getTrackCount(); public int getTrackGroupCount();
/** /**
* Returns the format of the specified track. * Returns the {@link TrackGroup} at the specified index.
* <p>
* Note that whilst the format of a track will remain constant, the format of the actual media
* stream may change dynamically. An example of this is where the track is adaptive (i.e.
* {@link MediaFormat#adaptive} is true). Hence the track formats returned through this method
* should not be used to configure decoders. Decoder configuration should be performed using the
* formats obtained when reading the media stream through calls to
* {@link TrackStream#readData(MediaFormatHolder, SampleHolder)}.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @param track The track index. * @int group The group index.
* @return The format of the specified track. * @return The corresponding {@link TrackGroup}.
*/ */
MediaFormat getFormat(int track); public TrackGroup getTrackGroup(int group);
/** /**
* Indicates to the source that it should continue buffering data for its enabled tracks. * Indicates to the source that it should continue buffering data for its enabled tracks.
...@@ -112,17 +101,19 @@ public interface SampleSource { ...@@ -112,17 +101,19 @@ public interface SampleSource {
void seekToUs(long positionUs); void seekToUs(long positionUs);
/** /**
* Enables the specified track. Returning a {@link TrackStream} from which the track's data can * Enables the specified group to read the specified tracks. A {@link TrackStream} is returned
* be read. * through which the enabled track's data can be read.
* <p> * <p>
* This method should only be called after the source has been prepared, and when the specified * This method should only be called after the source has been prepared, and when the specified
* track is disabled. * group is disabled. Note that {@code tracks.length} is only permitted to be greater than one
* if {@link TrackGroup#adaptive} is true for the group.
* *
* @param track The track to enable. * @param group The group index.
* @param tracks The track indices.
* @param positionUs The current playback position in microseconds. * @param positionUs The current playback position in microseconds.
* @return A {@link TrackStream} from which the enabled track's data can be read. * @return A {@link TrackStream} from which the enabled track's data can be read.
*/ */
TrackStream enable(int track, long positionUs); public TrackStream enable(int group, int[] tracks, long positionUs);
/** /**
* Releases the source. * Releases the source.
......
...@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
private final DataSource dataSource; private final DataSource dataSource;
private final MediaFormat format; private final MediaFormat format;
private final int minLoadableRetryCount; private final int minLoadableRetryCount;
private final TrackGroup tracks;
private int state; private int state;
private byte[] sampleData; private byte[] sampleData;
...@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
this.dataSource = dataSource; this.dataSource = dataSource;
this.format = format; this.format = format;
this.minLoadableRetryCount = minLoadableRetryCount; this.minLoadableRetryCount = minLoadableRetryCount;
tracks = new TrackGroup(format);
sampleData = new byte[INITIAL_SAMPLE_SIZE]; sampleData = new byte[INITIAL_SAMPLE_SIZE];
} }
...@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return 1; return 1;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
return format; return tracks;
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
state = STATE_SEND_FORMAT; state = STATE_SEND_FORMAT;
clearCurrentLoadableException(); clearCurrentLoadableException();
maybeStartLoading(); maybeStartLoading();
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.SampleSource.TrackStream;
/**
 * Defines a group of tracks exposed by a {@link SampleSource}.
 * <p>
 * A {@link SampleSource} is only able to provide one {@link TrackStream} corresponding to a group
 * at any given time. If {@link #adaptive} is true this {@link TrackStream} can adapt between
 * multiple tracks within the group. If {@link #adaptive} is false then it's only possible to
 * consume one track from the group at a given time.
 */
public final class TrackGroup {

  /**
   * The number of tracks in the group.
   */
  public final int length;

  /**
   * Whether it's possible to adapt between multiple tracks in the group.
   */
  public final boolean adaptive;

  private final MediaFormat[] formats;

  /**
   * Constructs a non-adaptive group consisting of a single track.
   *
   * @param format The format of the single track.
   */
  public TrackGroup(MediaFormat format) {
    this(false, format);
  }

  /**
   * @param supportsAdaptive Whether it's possible to adapt between multiple tracks in the group.
   * @param formats The track formats.
   */
  public TrackGroup(boolean supportsAdaptive, MediaFormat... formats) {
    this.adaptive = supportsAdaptive;
    // Defensive copy: the varargs array is caller-owned, so clone it to ensure the group's
    // contents cannot be changed from outside after construction.
    this.formats = formats.clone();
    length = formats.length;
  }

  /**
   * Gets the format of the track at a given index.
   *
   * @param index The index of the track.
   * @return The track's format.
   */
  public MediaFormat getFormat(int index) {
    return formats[index];
  }

}
...@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder; ...@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource; import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSource.TrackStream; import com.google.android.exoplayer.SampleSource.TrackStream;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.extractor.DefaultTrackOutput; import com.google.android.exoplayer.extractor.DefaultTrackOutput;
import com.google.android.exoplayer.upstream.Loader; import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable; import com.google.android.exoplayer.upstream.Loader.Loadable;
...@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call ...@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
return false; return false;
} }
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
if (chunkSource.getTrackCount() > 0) { TrackGroup trackGroup = chunkSource.getTracks();
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType); if (trackGroup.length > 0) {
durationUs = chunkSource.getFormat(0).durationUs; MediaFormat firstTrackFormat = trackGroup.getFormat(0);
loader = new Loader("Loader:" + firstTrackFormat.mimeType);
durationUs = firstTrackFormat.durationUs;
} }
state = STATE_PREPARED; state = STATE_PREPARED;
return true; return true;
...@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call ...@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
Assertions.checkState(state != STATE_IDLE); return 1;
return chunkSource.getTrackCount();
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(state != STATE_IDLE); Assertions.checkState(state != STATE_IDLE);
return chunkSource.getFormat(track); return chunkSource.getTracks();
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(state == STATE_PREPARED); Assertions.checkState(state == STATE_PREPARED);
Assertions.checkState(enabledTrackCount++ == 0); Assertions.checkState(enabledTrackCount++ == 0);
state = STATE_ENABLED; state = STATE_ENABLED;
chunkSource.enable(track); chunkSource.enable(tracks);
loadControl.register(this, bufferSizeContribution); loadControl.register(this, bufferSizeContribution);
downstreamFormat = null; downstreamFormat = null;
downstreamMediaFormat = null; downstreamMediaFormat = null;
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
*/ */
package com.google.android.exoplayer.chunk; package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.TrackGroup;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
...@@ -48,33 +48,23 @@ public interface ChunkSource { ...@@ -48,33 +48,23 @@ public interface ChunkSource {
boolean prepare(); boolean prepare();
/** /**
* Returns the number of tracks exposed by the source. * Gets the group of tracks provided by the source.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @return The number of tracks. * @return The track group.
*/ */
int getTrackCount(); TrackGroup getTracks();
/** /**
* Gets the format of the specified track. * Enable the source for the specified tracks.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared and when the source is
*
* @param track The track index.
* @return The format of the track.
*/
MediaFormat getFormat(int track);
/**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* disabled. * disabled.
* *
* @param track The track index. * @param tracks The track indices.
*/ */
void enable(int track); void enable(int[] tracks);
/** /**
* Indicates to the source that it should still be checking for updates to the stream. * Indicates to the source that it should still be checking for updates to the stream.
......
...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk; ...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.C; import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec; import com.google.android.exoplayer.upstream.DataSpec;
...@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private final DataSpec dataSpec; private final DataSpec dataSpec;
private final Format format; private final Format format;
private final long durationUs; private final long durationUs;
private final MediaFormat mediaFormat; private final TrackGroup tracks;
/** /**
* @param dataSource A {@link DataSource} suitable for loading the sample data. * @param dataSource A {@link DataSource} suitable for loading the sample data.
...@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
this.dataSpec = dataSpec; this.dataSpec = dataSpec;
this.format = format; this.format = format;
this.durationUs = durationUs; this.durationUs = durationUs;
this.mediaFormat = mediaFormat; tracks = new TrackGroup(mediaFormat);
} }
@Override @Override
...@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource {
} }
@Override @Override
public int getTrackCount() { public TrackGroup getTracks() {
return 1; return tracks;
} }
@Override @Override
public MediaFormat getFormat(int track) { public void enable(int[] tracks) {
return mediaFormat;
}
@Override
public void enable(int track) {
// Do nothing. // Do nothing.
} }
...@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private SingleSampleMediaChunk initChunk() { private SingleSampleMediaChunk initChunk() {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0, return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0,
durationUs, 0, mediaFormat, null, Chunk.NO_PARENT_ID); durationUs, 0, tracks.getFormat(0), null, Chunk.NO_PARENT_ID);
} }
} }
...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat; ...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange; import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TimeRange.DynamicTimeRange; import com.google.android.exoplayer.TimeRange.DynamicTimeRange;
import com.google.android.exoplayer.TimeRange.StaticTimeRange; import com.google.android.exoplayer.TimeRange.StaticTimeRange;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.Chunk; import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper; import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
import com.google.android.exoplayer.chunk.ChunkOperationHolder; import com.google.android.exoplayer.chunk.ChunkOperationHolder;
...@@ -33,7 +34,6 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation; ...@@ -33,7 +34,6 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.InitializationChunk; import com.google.android.exoplayer.chunk.InitializationChunk;
import com.google.android.exoplayer.chunk.MediaChunk; import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.SingleSampleMediaChunk; import com.google.android.exoplayer.chunk.SingleSampleMediaChunk;
import com.google.android.exoplayer.dash.DashTrackSelector.Output;
import com.google.android.exoplayer.dash.mpd.AdaptationSet; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.ContentProtection; import com.google.android.exoplayer.dash.mpd.ContentProtection;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
...@@ -56,7 +56,6 @@ import android.util.Log; ...@@ -56,7 +56,6 @@ import android.util.Log;
import android.util.SparseArray; import android.util.SparseArray;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
...@@ -75,7 +74,8 @@ import java.util.List; ...@@ -75,7 +74,8 @@ import java.util.List;
* </ol> * </ol>
*/ */
// TODO: handle cases where the above assumption are false // TODO: handle cases where the above assumption are false
public class DashChunkSource implements ChunkSource, Output { // TODO[REFACTOR]: Handle multiple adaptation sets of the same type (at a higher level).
public class DashChunkSource implements ChunkSource {
/** /**
* Interface definition for a callback to be notified of {@link DashChunkSource} events. * Interface definition for a callback to be notified of {@link DashChunkSource} events.
...@@ -108,12 +108,11 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -108,12 +108,11 @@ public class DashChunkSource implements ChunkSource, Output {
private final Handler eventHandler; private final Handler eventHandler;
private final EventListener eventListener; private final EventListener eventListener;
private final int adaptationSetType;
private final DataSource dataSource; private final DataSource dataSource;
private final FormatEvaluator adaptiveFormatEvaluator; private final FormatEvaluator adaptiveFormatEvaluator;
private final Evaluation evaluation; private final Evaluation evaluation;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher; private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final DashTrackSelector trackSelector;
private final ArrayList<ExposedTrack> tracks;
private final SparseArray<PeriodHolder> periodHolders; private final SparseArray<PeriodHolder> periodHolders;
private final Clock systemClock; private final Clock systemClock;
private final long liveEdgeLatencyUs; private final long liveEdgeLatencyUs;
...@@ -122,20 +121,28 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -122,20 +121,28 @@ public class DashChunkSource implements ChunkSource, Output {
private final boolean live; private final boolean live;
private final int eventSourceId; private final int eventSourceId;
private boolean prepareCalled;
private MediaPresentationDescription currentManifest; private MediaPresentationDescription currentManifest;
private MediaPresentationDescription processedManifest; private MediaPresentationDescription processedManifest;
private ExposedTrack enabledTrack;
private int nextPeriodHolderIndex; private int nextPeriodHolderIndex;
private TimeRange availableRange; private TimeRange availableRange;
private boolean prepareCalled;
private boolean startAtLiveEdge; private boolean startAtLiveEdge;
private boolean lastChunkWasInitialization; private boolean lastChunkWasInitialization;
private IOException fatalError; private IOException fatalError;
// Properties of exposed tracks.
private int adaptationSetIndex;
private TrackGroup trackGroup;
private Format[] trackFormats;
// Properties of enabled tracks.
private Format[] enabledFormats;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
/** /**
* Lightweight constructor to use for fixed duration content. * Lightweight constructor to use for fixed duration content.
* *
* @param trackSelector Selects tracks to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content. * @param durationMs The duration of the content.
...@@ -144,17 +151,15 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -144,17 +151,15 @@ public class DashChunkSource implements ChunkSource, Output {
* {@link AdaptationSet#TYPE_TEXT}. * {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source. * @param representations The representations to be considered by the source.
*/ */
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource, public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType, long durationMs, int adaptationSetType, Representation... representations) {
Representation... representations) { this(dataSource, adaptiveFormatEvaluator, durationMs, adaptationSetType,
this(trackSelector, dataSource, adaptiveFormatEvaluator, durationMs, adaptationSetType,
Arrays.asList(representations)); Arrays.asList(representations));
} }
/** /**
* Lightweight constructor to use for fixed duration content. * Lightweight constructor to use for fixed duration content.
* *
* @param trackSelector Selects tracks to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content. * @param durationMs The duration of the content.
...@@ -163,25 +168,26 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -163,25 +168,26 @@ public class DashChunkSource implements ChunkSource, Output {
* {@link AdaptationSet#TYPE_TEXT}. * {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source. * @param representations The representations to be considered by the source.
*/ */
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource, public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType, long durationMs, int adaptationSetType, List<Representation> representations) {
List<Representation> representations) { this(buildManifest(durationMs, adaptationSetType, representations), adaptationSetType,
this(buildManifest(durationMs, adaptationSetType, representations), trackSelector, dataSource, dataSource, adaptiveFormatEvaluator);
adaptiveFormatEvaluator);
} }
/** /**
* Constructor to use for fixed duration content. * Constructor to use for fixed duration content.
* *
* @param manifest The manifest. * @param manifest The manifest.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source. * @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
*/ */
public DashChunkSource(MediaPresentationDescription manifest, DashTrackSelector trackSelector, public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetType,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) { DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, new SystemClock(), 0, this(null, manifest, adaptationSetType, dataSource, adaptiveFormatEvaluator, new SystemClock(),
0, false, null, null, 0); 0, 0, false, null, null, 0);
} }
/** /**
...@@ -192,7 +198,9 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -192,7 +198,9 @@ public class DashChunkSource implements ChunkSource, Output {
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source. * @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
...@@ -209,10 +217,10 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -209,10 +217,10 @@ public class DashChunkSource implements ChunkSource, Output {
* @param eventSourceId An identifier that gets passed to {@code eventListener} methods. * @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
*/ */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
DashTrackSelector trackSelector, DataSource dataSource, int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, Handler eventHandler,
Handler eventHandler, EventListener eventListener, int eventSourceId) { EventListener eventListener, int eventSourceId) {
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetType,
dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000, dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener, eventSourceId); elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener, eventSourceId);
} }
...@@ -222,7 +230,9 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -222,7 +230,9 @@ public class DashChunkSource implements ChunkSource, Output {
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source. * @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
...@@ -241,25 +251,24 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -241,25 +251,24 @@ public class DashChunkSource implements ChunkSource, Output {
* @param eventSourceId An identifier that gets passed to {@code eventListener} methods. * @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
*/ */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
DashTrackSelector trackSelector, DataSource dataSource, int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, boolean startAtLiveEdge,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener, Handler eventHandler, EventListener eventListener, int eventSourceId) {
int eventSourceId) { this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetType,
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector,
dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000, dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener, elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener,
eventSourceId); eventSourceId);
} }
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, /* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, DashTrackSelector trackSelector, MediaPresentationDescription initialManifest, int adaptationSetType,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs, Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener, boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener,
int eventSourceId) { int eventSourceId) {
this.manifestFetcher = manifestFetcher; this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest; this.currentManifest = initialManifest;
this.trackSelector = trackSelector; this.adaptationSetType = adaptationSetType;
this.dataSource = dataSource; this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator; this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.systemClock = systemClock; this.systemClock = systemClock;
...@@ -272,7 +281,6 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -272,7 +281,6 @@ public class DashChunkSource implements ChunkSource, Output {
this.evaluation = new Evaluation(); this.evaluation = new Evaluation();
this.availableRangeValues = new long[2]; this.availableRangeValues = new long[2];
periodHolders = new SparseArray<>(); periodHolders = new SparseArray<>();
tracks = new ArrayList<>();
live = initialManifest.dynamic; live = initialManifest.dynamic;
} }
...@@ -291,30 +299,34 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -291,30 +299,34 @@ public class DashChunkSource implements ChunkSource, Output {
public boolean prepare() { public boolean prepare() {
if (!prepareCalled) { if (!prepareCalled) {
prepareCalled = true; prepareCalled = true;
try { selectTracks(currentManifest, 0);
trackSelector.selectTracks(currentManifest, 0, this);
} catch (IOException e) {
fatalError = e;
}
} }
return fatalError == null; return true;
}
@Override
public int getTrackCount() {
return tracks.size();
} }
@Override @Override
public final MediaFormat getFormat(int track) { public final TrackGroup getTracks() {
return tracks.get(track).trackFormat; return trackGroup;
} }
@Override @Override
public void enable(int track) { public void enable(int[] tracks) {
enabledTrack = tracks.get(track); int maxWidth = -1;
if (enabledTrack.isAdaptive()) { int maxHeight = -1;
enabledFormats = new Format[tracks.length];
for (int i = 0; i < tracks.length; i++) {
enabledFormats[i] = trackFormats[tracks[i]];
maxWidth = Math.max(enabledFormats[i].width, maxWidth);
maxHeight = Math.max(enabledFormats[i].height, maxHeight);
}
Arrays.sort(enabledFormats, new DecreasingBandwidthComparator());
if (enabledFormats.length > 1) {
adaptiveMaxWidth = maxWidth;
adaptiveMaxHeight = maxHeight;
adaptiveFormatEvaluator.enable(); adaptiveFormatEvaluator.enable();
} else {
adaptiveMaxWidth = -1;
adaptiveMaxHeight = -1;
} }
if (manifestFetcher != null) { if (manifestFetcher != null) {
manifestFetcher.enable(); manifestFetcher.enable();
...@@ -363,11 +375,10 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -363,11 +375,10 @@ public class DashChunkSource implements ChunkSource, Output {
evaluation.queueSize = queue.size(); evaluation.queueSize = queue.size();
if (evaluation.format == null || !lastChunkWasInitialization) { if (evaluation.format == null || !lastChunkWasInitialization) {
if (enabledTrack.isAdaptive()) { if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats, adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledFormats, evaluation);
evaluation);
} else { } else {
evaluation.format = enabledTrack.fixedFormat; evaluation.format = enabledFormats[0];
evaluation.trigger = Chunk.TRIGGER_MANUAL; evaluation.trigger = Chunk.TRIGGER_MANUAL;
} }
} }
...@@ -488,7 +499,7 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -488,7 +499,7 @@ public class DashChunkSource implements ChunkSource, Output {
: startingNewPeriod ? representationHolder.getFirstAvailableSegmentNum() : startingNewPeriod ? representationHolder.getFirstAvailableSegmentNum()
: queue.get(out.queueSize - 1).getNextChunkIndex(); : queue.get(out.queueSize - 1).getNextChunkIndex();
Chunk nextMediaChunk = newMediaChunk(periodHolder, representationHolder, dataSource, Chunk nextMediaChunk = newMediaChunk(periodHolder, representationHolder, dataSource,
mediaFormat, enabledTrack, segmentNum, evaluation.trigger); mediaFormat, adaptiveMaxWidth, adaptiveMaxHeight, segmentNum, evaluation.trigger);
lastChunkWasInitialization = false; lastChunkWasInitialization = false;
out.chunk = nextMediaChunk; out.chunk = nextMediaChunk;
} }
...@@ -529,7 +540,7 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -529,7 +540,7 @@ public class DashChunkSource implements ChunkSource, Output {
@Override @Override
public void disable(List<? extends MediaChunk> queue) { public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) { if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.disable(); adaptiveFormatEvaluator.disable();
} }
if (manifestFetcher != null) { if (manifestFetcher != null) {
...@@ -539,72 +550,53 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -539,72 +550,53 @@ public class DashChunkSource implements ChunkSource, Output {
evaluation.format = null; evaluation.format = null;
availableRange = null; availableRange = null;
fatalError = null; fatalError = null;
enabledTrack = null; enabledFormats = null;
} }
// DashTrackSelector.Output implementation. // Private methods.
@Override private void selectTracks(MediaPresentationDescription manifest, int periodIndex) {
public void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex, Period period = manifest.getPeriod(periodIndex);
int adaptationSetIndex, int[] representationIndices) { for (int i = 0; i < period.adaptationSets.size(); i++) {
if (adaptiveFormatEvaluator == null) { AdaptationSet adaptationSet = period.adaptationSets.get(i);
Log.w(TAG, "Skipping adaptive track (missing format evaluator)"); if (adaptationSet.type == adaptationSetType) {
return; // We've found an adaptation set of the exposed type.
} adaptationSetIndex = i;
AdaptationSet adaptationSet = manifest.getPeriod(periodIndex).adaptationSets.get( List<Representation> representations = adaptationSet.representations;
adaptationSetIndex); trackFormats = new Format[representations.size()];
int maxWidth = 0; MediaFormat[] trackMediaFormats = new MediaFormat[representations.size()];
int maxHeight = 0; int trackCount = 0;
Format maxHeightRepresentationFormat = null; for (int j = 0; j < trackMediaFormats.length; j++) {
Format[] representationFormats = new Format[representationIndices.length]; trackMediaFormats[trackCount] = getMediaFormat(manifest, representations.get(j).format);
for (int i = 0; i < representationFormats.length; i++) { if (trackMediaFormats[trackCount] != null) {
Format format = adaptationSet.representations.get(representationIndices[i]).format; trackFormats[trackCount++] = representations.get(j).format;
if (maxHeightRepresentationFormat == null || format.height > maxHeight) { }
maxHeightRepresentationFormat = format; }
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null,
Arrays.copyOf(trackMediaFormats, trackCount));
return;
} }
maxWidth = Math.max(maxWidth, format.width);
maxHeight = Math.max(maxHeight, format.height);
representationFormats[i] = format;
} }
Arrays.sort(representationFormats, new DecreasingBandwidthComparator()); trackGroup = new TrackGroup(adaptiveFormatEvaluator != null);
long trackDurationUs = live ? C.UNKNOWN_TIME_US : manifest.duration * 1000; trackFormats = new Format[0];
String mediaMimeType = getMediaMimeType(maxHeightRepresentationFormat);
if (mediaMimeType == null) {
Log.w(TAG, "Skipped adaptive track (unknown media mime type)");
return;
}
MediaFormat trackFormat = getTrackFormat(adaptationSet.type, maxHeightRepresentationFormat,
mediaMimeType, trackDurationUs);
if (trackFormat == null) {
Log.w(TAG, "Skipped adaptive track (unknown media format)");
return;
}
tracks.add(new ExposedTrack(trackFormat.copyAsAdaptive(null), adaptationSetIndex,
representationFormats, maxWidth, maxHeight));
} }
@Override private MediaFormat getMediaFormat(MediaPresentationDescription manifest,
public void fixedTrack(MediaPresentationDescription manifest, int periodIndex, Format representationFormat) {
int adaptationSetIndex, int representationIndex) {
List<AdaptationSet> adaptationSets = manifest.getPeriod(periodIndex).adaptationSets;
AdaptationSet adaptationSet = adaptationSets.get(adaptationSetIndex);
Format representationFormat = adaptationSet.representations.get(representationIndex).format;
String mediaMimeType = getMediaMimeType(representationFormat); String mediaMimeType = getMediaMimeType(representationFormat);
if (mediaMimeType == null) { if (mediaMimeType == null) {
Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media mime type)"); Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media mime type)");
return; return null;
} }
MediaFormat trackFormat = getTrackFormat(adaptationSet.type, representationFormat, MediaFormat trackFormat = getTrackFormat(adaptationSetType, representationFormat,
mediaMimeType, manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000); mediaMimeType, manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000);
if (trackFormat == null) { if (trackFormat == null) {
Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media format)"); Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media format)");
return; return null;
} }
tracks.add(new ExposedTrack(trackFormat, adaptationSetIndex, representationFormat)); return trackFormat;
} }
// Private methods.
// Visible for testing. // Visible for testing.
/* package */ TimeRange getAvailableRange() { /* package */ TimeRange getAvailableRange() {
return availableRange; return availableRange;
...@@ -629,8 +621,8 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -629,8 +621,8 @@ public class DashChunkSource implements ChunkSource, Output {
MediaFormat.NO_VALUE, durationUs, format.audioChannels, format.audioSamplingRate, null, MediaFormat.NO_VALUE, durationUs, format.audioChannels, format.audioSamplingRate, null,
format.language); format.language);
case AdaptationSet.TYPE_TEXT: case AdaptationSet.TYPE_TEXT:
return MediaFormat.createTextFormat(format.id, mediaMimeType, format.bitrate, return MediaFormat.createTextFormat(format.id, mediaMimeType, format.bitrate, durationUs,
durationUs, format.language); format.language);
default: default:
return null; return null;
} }
...@@ -680,9 +672,9 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -680,9 +672,9 @@ public class DashChunkSource implements ChunkSource, Output {
extractor, manifestIndex); extractor, manifestIndex);
} }
protected Chunk newMediaChunk( protected Chunk newMediaChunk(PeriodHolder periodHolder,
PeriodHolder periodHolder, RepresentationHolder representationHolder, DataSource dataSource, RepresentationHolder representationHolder, DataSource dataSource, MediaFormat mediaFormat,
MediaFormat mediaFormat, ExposedTrack enabledTrack, int segmentNum, int trigger) { int adaptiveMaxWidth, int adaptiveMaxHeight, int segmentNum, int trigger) {
Representation representation = representationHolder.representation; Representation representation = representationHolder.representation;
Format format = representation.format; Format format = representation.format;
long startTimeUs = representationHolder.getSegmentStartTimeUs(segmentNum); long startTimeUs = representationHolder.getSegmentStartTimeUs(segmentNum);
...@@ -694,13 +686,12 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -694,13 +686,12 @@ public class DashChunkSource implements ChunkSource, Output {
long sampleOffsetUs = periodHolder.startTimeUs - representation.presentationTimeOffsetUs; long sampleOffsetUs = periodHolder.startTimeUs - representation.presentationTimeOffsetUs;
if (mimeTypeIsRawText(format.mimeType)) { if (mimeTypeIsRawText(format.mimeType)) {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL, format, return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL, format,
startTimeUs, endTimeUs, segmentNum, enabledTrack.trackFormat, null, startTimeUs, endTimeUs, segmentNum, mediaFormat, null, periodHolder.localIndex);
periodHolder.localIndex);
} else { } else {
boolean isMediaFormatFinal = (mediaFormat != null); boolean isMediaFormatFinal = (mediaFormat != null);
return new ContainerMediaChunk(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs, return new ContainerMediaChunk(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs,
segmentNum, sampleOffsetUs, representationHolder.extractorWrapper, mediaFormat, segmentNum, sampleOffsetUs, representationHolder.extractorWrapper, mediaFormat,
enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight, periodHolder.drmInitData, adaptiveMaxWidth, adaptiveMaxHeight, periodHolder.drmInitData,
isMediaFormatFinal, periodHolder.localIndex); isMediaFormatFinal, periodHolder.localIndex);
} }
} }
...@@ -752,10 +743,10 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -752,10 +743,10 @@ public class DashChunkSource implements ChunkSource, Output {
try { try {
int periodHolderCount = periodHolders.size(); int periodHolderCount = periodHolders.size();
if (periodHolderCount > 0) { if (periodHolderCount > 0) {
periodHolders.valueAt(0).updatePeriod(manifest, 0, enabledTrack); periodHolders.valueAt(0).updatePeriod(manifest, 0, adaptationSetIndex);
if (periodHolderCount > 1) { if (periodHolderCount > 1) {
int lastIndex = periodHolderCount - 1; int lastIndex = periodHolderCount - 1;
periodHolders.valueAt(lastIndex).updatePeriod(manifest, lastIndex, enabledTrack); periodHolders.valueAt(lastIndex).updatePeriod(manifest, lastIndex, adaptationSetIndex);
} }
} }
} catch (BehindLiveWindowException e) { } catch (BehindLiveWindowException e) {
...@@ -765,7 +756,8 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -765,7 +756,8 @@ public class DashChunkSource implements ChunkSource, Output {
// Add new periods. // Add new periods.
for (int i = periodHolders.size(); i < manifest.getPeriodCount(); i++) { for (int i = periodHolders.size(); i < manifest.getPeriodCount(); i++) {
PeriodHolder holder = new PeriodHolder(nextPeriodHolderIndex, manifest, i, enabledTrack); PeriodHolder holder = new PeriodHolder(nextPeriodHolderIndex, manifest, i, adaptationSetIndex,
enabledFormats);
periodHolders.put(nextPeriodHolderIndex, holder); periodHolders.put(nextPeriodHolderIndex, holder);
nextPeriodHolderIndex++; nextPeriodHolderIndex++;
} }
...@@ -813,45 +805,6 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -813,45 +805,6 @@ public class DashChunkSource implements ChunkSource, Output {
// Protected classes. // Protected classes.
protected static final class ExposedTrack {
public final MediaFormat trackFormat;
public final int adaptiveMaxWidth;
public final int adaptiveMaxHeight;
private final int adaptationSetIndex;
// Non-adaptive track variables.
private final Format fixedFormat;
// Adaptive track variables.
private final Format[] adaptiveFormats;
public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format fixedFormat) {
this.trackFormat = trackFormat;
this.adaptationSetIndex = adaptationSetIndex;
this.fixedFormat = fixedFormat;
this.adaptiveFormats = null;
this.adaptiveMaxWidth = -1;
this.adaptiveMaxHeight = -1;
}
public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format[] adaptiveFormats,
int maxWidth, int maxHeight) {
this.trackFormat = trackFormat;
this.adaptationSetIndex = adaptationSetIndex;
this.adaptiveFormats = adaptiveFormats;
this.adaptiveMaxWidth = maxWidth;
this.adaptiveMaxHeight = maxHeight;
this.fixedFormat = null;
}
public boolean isAdaptive() {
return adaptiveFormats != null;
}
}
protected static final class RepresentationHolder { protected static final class RepresentationHolder {
public final boolean mimeTypeIsRawText; public final boolean mimeTypeIsRawText;
...@@ -966,26 +919,25 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -966,26 +919,25 @@ public class DashChunkSource implements ChunkSource, Output {
private long availableEndTimeUs; private long availableEndTimeUs;
public PeriodHolder(int localIndex, MediaPresentationDescription manifest, int manifestIndex, public PeriodHolder(int localIndex, MediaPresentationDescription manifest, int manifestIndex,
ExposedTrack selectedTrack) { int adaptationSetIndex, Format[] enabledFormats) {
this.localIndex = localIndex; this.localIndex = localIndex;
Period period = manifest.getPeriod(manifestIndex); Period period = manifest.getPeriod(manifestIndex);
long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex); long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
AdaptationSet adaptationSet = period.adaptationSets.get(selectedTrack.adaptationSetIndex); AdaptationSet adaptationSet = period.adaptationSets.get(adaptationSetIndex);
List<Representation> representations = adaptationSet.representations; List<Representation> representations = adaptationSet.representations;
startTimeUs = period.startMs * 1000; startTimeUs = period.startMs * 1000;
drmInitData = getDrmInitData(adaptationSet); drmInitData = getDrmInitData(adaptationSet);
if (!selectedTrack.isAdaptive()) { if (enabledFormats.length > 1) {
representationIndices = new int[] { representationIndices = new int[enabledFormats.length];
getRepresentationIndex(representations, selectedTrack.fixedFormat.id)}; for (int j = 0; j < enabledFormats.length; j++) {
} else { representationIndices[j] = getRepresentationIndex(representations, enabledFormats[j].id);
representationIndices = new int[selectedTrack.adaptiveFormats.length];
for (int j = 0; j < selectedTrack.adaptiveFormats.length; j++) {
representationIndices[j] = getRepresentationIndex(
representations, selectedTrack.adaptiveFormats[j].id);
} }
} else {
representationIndices = new int[] {
getRepresentationIndex(representations, enabledFormats[0].id)};
} }
representationHolders = new HashMap<>(); representationHolders = new HashMap<>();
...@@ -1000,11 +952,11 @@ public class DashChunkSource implements ChunkSource, Output { ...@@ -1000,11 +952,11 @@ public class DashChunkSource implements ChunkSource, Output {
} }
public void updatePeriod(MediaPresentationDescription manifest, int manifestIndex, public void updatePeriod(MediaPresentationDescription manifest, int manifestIndex,
ExposedTrack selectedTrack) throws BehindLiveWindowException { int adaptationSetIndex) throws BehindLiveWindowException {
Period period = manifest.getPeriod(manifestIndex); Period period = manifest.getPeriod(manifestIndex);
long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex); long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
List<Representation> representations = period.adaptationSets List<Representation> representations = period.adaptationSets
.get(selectedTrack.adaptationSetIndex).representations; .get(adaptationSetIndex).representations;
for (int j = 0; j < representationIndices.length; j++) { for (int j = 0; j < representationIndices.length; j++) {
Representation representation = representations.get(representationIndices[j]); Representation representation = representations.get(representationIndices[j]);
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link Period} of a media presentation description.
 */
public interface DashTrackSelector {

  /**
   * Defines a selector output that receives the selected tracks.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified representations in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the representations
     *     are located.
     * @param representationIndices The indices of the representations within the adaptation set.
     */
    void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
        int adaptationSetIndex, int[] representationIndices);

    /**
     * Outputs a fixed track corresponding to the specified representation in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the track is located.
     * @param representationIndex The index of the representation within the adaptation set.
     */
    void fixedTrack(MediaPresentationDescription manifest, int periodIndex, int adaptationSetIndex,
        int representationIndex);

  }

  /**
   * Outputs a track selection for a given period.
   *
   * @param manifest The media presentation description to process.
   * @param periodIndex The index of the period to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the period.
   */
  void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
/**
 * A default {@link DashTrackSelector} implementation.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultDashTrackSelector implements DashTrackSelector {

  private final int adaptationSetType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video adaptation sets.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newAudioInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newTextInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
  }

  private DefaultDashTrackSelector(int adaptationSetType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.adaptationSetType = adaptationSetType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException {
    Period period = manifest.getPeriod(periodIndex);
    int adaptationSetCount = period.adaptationSets.size();
    for (int setIndex = 0; setIndex < adaptationSetCount; setIndex++) {
      AdaptationSet adaptationSet = period.adaptationSets.get(setIndex);
      if (adaptationSet.type != adaptationSetType) {
        // Not the type this selector exposes.
        continue;
      }
      if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
        outputVideoTracks(manifest, periodIndex, setIndex, adaptationSet, output);
      } else {
        // Audio and text: expose every representation as a fixed track.
        int representationCount = adaptationSet.representations.size();
        for (int representationIndex = 0; representationIndex < representationCount;
            representationIndex++) {
          output.fixedTrack(manifest, periodIndex, setIndex, representationIndex);
        }
      }
    }
  }

  /**
   * Outputs tracks for a video adaptation set: an adaptive track when more than one representation
   * is selected, plus a fixed track for each selected representation.
   */
  private void outputVideoTracks(MediaPresentationDescription manifest, int periodIndex,
      int setIndex, AdaptationSet adaptationSet, Output output) throws IOException {
    int[] selectedIndices;
    if (filterVideoRepresentations) {
      boolean filterHd = filterProtectedHdContent && adaptationSet.hasContentProtection();
      selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, adaptationSet.representations, null, filterHd);
    } else {
      selectedIndices = Util.firstIntegersArray(adaptationSet.representations.size());
    }
    if (selectedIndices.length > 1) {
      output.adaptiveTrack(manifest, periodIndex, setIndex, selectedIndices);
    }
    for (int i = 0; i < selectedIndices.length; i++) {
      output.fixedTrack(manifest, periodIndex, setIndex, selectedIndices[i]);
    }
  }

}
...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder; ...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.ParserException; import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource; import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.drm.DrmInitData; import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.Allocator; import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
...@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
private boolean prepared; private boolean prepared;
private int enabledTrackCount; private int enabledTrackCount;
private MediaFormat[] mediaFormats; private TrackGroup[] tracks;
private long durationUs; private long durationUs;
private boolean[] pendingMediaFormat; private boolean[] pendingMediaFormat;
private boolean[] pendingResets; private boolean[] pendingResets;
...@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) { if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) {
int trackCount = sampleQueues.size(); int trackCount = sampleQueues.size();
tracks = new TrackGroup[trackCount];
trackEnabledStates = new boolean[trackCount]; trackEnabledStates = new boolean[trackCount];
pendingResets = new boolean[trackCount]; pendingResets = new boolean[trackCount];
pendingMediaFormat = new boolean[trackCount]; pendingMediaFormat = new boolean[trackCount];
mediaFormats = new MediaFormat[trackCount];
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
for (int i = 0; i < trackCount; i++) { for (int i = 0; i < trackCount; i++) {
MediaFormat format = sampleQueues.valueAt(i).getFormat(); MediaFormat format = sampleQueues.valueAt(i).getFormat();
mediaFormats[i] = format; tracks[i] = new TrackGroup(format);
if (format.durationUs > durationUs) { if (format.durationUs > durationUs) {
durationUs = format.durationUs; durationUs = format.durationUs;
} }
...@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return sampleQueues.size(); return tracks.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared); return tracks[group];
return mediaFormats[track];
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
Assertions.checkState(!trackEnabledStates[track]); Assertions.checkState(!trackEnabledStates[group]);
enabledTrackCount++; enabledTrackCount++;
trackEnabledStates[track] = true; trackEnabledStates[group] = true;
pendingMediaFormat[track] = true; pendingMediaFormat[group] = true;
pendingResets[track] = false; pendingResets[group] = false;
if (enabledTrackCount == 1) { if (enabledTrackCount == 1) {
// Treat all enables in non-seekable media as being from t=0. // Treat all enables in non-seekable media as being from t=0.
positionUs = !seekMap.isSeekable() ? 0 : positionUs; positionUs = !seekMap.isSeekable() ? 0 : positionUs;
...@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
lastSeekPositionUs = positionUs; lastSeekPositionUs = positionUs;
restartFrom(positionUs); restartFrom(positionUs);
} }
return new TrackStreamImpl(track); return new TrackStreamImpl(group);
} }
/* package */ void disable(int track) { /* package */ void disable(int track) {
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import android.content.Context;
import android.text.TextUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A default {@link HlsTrackSelector} implementation.
*/
public final class DefaultHlsTrackSelector implements HlsTrackSelector {
  /** Selection mode that exposes the variant streams declared in the master playlist. */
  private static final int TYPE_DEFAULT = 0;
  /** Selection mode that exposes WebVTT subtitle renditions. */
  private static final int TYPE_VTT = 1;

  // Used for device-capability filtering of variants; null for the VTT instance.
  private final Context context;
  // One of TYPE_DEFAULT or TYPE_VTT.
  private final int type;
  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects the variant streams defined in the
   * playlist.
   *
   * @param context A context.
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newDefaultInstance(Context context) {
    return new DefaultHlsTrackSelector(context, TYPE_DEFAULT);
  }
  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects WebVTT subtitle renditions.
   *
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newVttInstance() {
    // No context is needed because subtitle renditions are not capability-filtered.
    return new DefaultHlsTrackSelector(null, TYPE_VTT);
  }
  // Private: instances are created via the static factory methods.
  private DefaultHlsTrackSelector(Context context, int type) {
    this.context = context;
    this.type = type;
  }
@Override
public void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException {
if (type == TYPE_VTT) {
List<Variant> subtitleVariants = playlist.subtitles;
if (subtitleVariants != null && !subtitleVariants.isEmpty()) {
for (int i = 0; i < subtitleVariants.size(); i++) {
output.fixedTrack(playlist, subtitleVariants.get(i));
}
}
return;
}
// Type is TYPE_DEFAULT.
ArrayList<Variant> enabledVariantList = new ArrayList<>();
int[] variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, playlist.variants, null, false);
for (int i = 0; i < variantIndices.length; i++) {
enabledVariantList.add(playlist.variants.get(variantIndices[i]));
}
ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
for (int i = 0; i < enabledVariantList.size(); i++) {
Variant variant = enabledVariantList.get(i);
if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
definiteVideoVariants.add(variant);
} else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
definiteAudioOnlyVariants.add(variant);
}
}
if (!definiteVideoVariants.isEmpty()) {
// We've identified some variants as definitely containing video. Assume variants within the
// master playlist are marked consistently, and hence that we have the full set. Filter out
// any other variants, which are likely to be audio only.
enabledVariantList = definiteVideoVariants;
} else if (definiteAudioOnlyVariants.size() < enabledVariantList.size()) {
// We've identified some variants, but not all, as being audio only. Filter them out to leave
// the remaining variants, which are likely to contain video.
enabledVariantList.removeAll(definiteAudioOnlyVariants);
} else {
// Leave the enabled variants unchanged. They're likely either all video or all audio.
}
if (enabledVariantList.size() > 1) {
Variant[] enabledVariants = new Variant[enabledVariantList.size()];
enabledVariantList.toArray(enabledVariants);
output.adaptiveTrack(playlist, enabledVariants);
}
for (int i = 0; i < enabledVariantList.size(); i++) {
output.fixedTrack(playlist, enabledVariantList.get(i));
}
}
private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
String codecs = variant.format.codecs;
if (TextUtils.isEmpty(codecs)) {
return false;
}
String[] codecArray = codecs.split("(\\s*,\\s*)|(\\s*$)");
for (int i = 0; i < codecArray.length; i++) {
if (codecArray[i].startsWith(prefix)) {
return true;
}
}
return false;
}
}
...@@ -39,6 +39,7 @@ import com.google.android.exoplayer.util.Util; ...@@ -39,6 +39,7 @@ import com.google.android.exoplayer.util.Util;
import android.net.Uri; import android.net.Uri;
import android.os.SystemClock; import android.os.SystemClock;
import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
...@@ -54,13 +55,16 @@ import java.util.Locale; ...@@ -54,13 +55,16 @@ import java.util.Locale;
/** /**
* A temporary test source of HLS chunks. * A temporary test source of HLS chunks.
*/ */
public class HlsChunkSource implements HlsTrackSelector.Output { public class HlsChunkSource {
/** /**
* Interface definition for a callback to be notified of {@link HlsChunkSource} events. * Interface definition for a callback to be notified of {@link HlsChunkSource} events.
*/ */
public interface EventListener extends BaseChunkSampleSourceEventListener {} public interface EventListener extends BaseChunkSampleSourceEventListener {}
public static final int TYPE_DEFAULT = 0;
public static final int TYPE_VTT = 1;
/** /**
* Adaptive switching is disabled. * Adaptive switching is disabled.
* <p> * <p>
...@@ -119,11 +123,10 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -119,11 +123,10 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private static final String WEBVTT_FILE_EXTENSION = ".webvtt"; private static final String WEBVTT_FILE_EXTENSION = ".webvtt";
private static final float BANDWIDTH_FRACTION = 0.8f; private static final float BANDWIDTH_FRACTION = 0.8f;
private final boolean isMaster; private final int type;
private final DataSource dataSource; private final DataSource dataSource;
private final HlsPlaylistParser playlistParser; private final HlsPlaylistParser playlistParser;
private final HlsMasterPlaylist masterPlaylist; private final HlsMasterPlaylist masterPlaylist;
private final HlsTrackSelector trackSelector;
private final BandwidthMeter bandwidthMeter; private final BandwidthMeter bandwidthMeter;
private final PtsTimestampAdjusterProvider timestampAdjusterProvider; private final PtsTimestampAdjusterProvider timestampAdjusterProvider;
private final int adaptiveMode; private final int adaptiveMode;
...@@ -131,22 +134,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -131,22 +134,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private final long minBufferDurationToSwitchUpUs; private final long minBufferDurationToSwitchUpUs;
private final long maxBufferDurationToSwitchDownUs; private final long maxBufferDurationToSwitchDownUs;
// TODO: Expose tracks.
private final ArrayList<ExposedTrack> tracks;
private int selectedTrackIndex;
// A list of variants considered during playback, ordered by decreasing bandwidth. The following
// three arrays are of the same length and are ordered in the same way (i.e. variantPlaylists[i],
// variantLastPlaylistLoadTimesMs[i] and variantBlacklistTimes[i] all correspond to variants[i]).
private Variant[] variants;
private HlsMediaPlaylist[] variantPlaylists;
private long[] variantLastPlaylistLoadTimesMs;
private long[] variantBlacklistTimes;
// The index in variants of the currently selected variant.
private int selectedVariantIndex;
private boolean prepareCalled; private boolean prepareCalled;
private byte[] scratchSpace; private byte[] scratchSpace;
private boolean live; private boolean live;
...@@ -158,14 +145,24 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -158,14 +145,24 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private String encryptionIvString; private String encryptionIvString;
private byte[] encryptionIv; private byte[] encryptionIv;
// Properties of exposed tracks.
private Variant[] exposedVariants;
// Properties of enabled variants.
private Variant[] enabledVariants;
private HlsMediaPlaylist[] enabledVariantPlaylists;
private long[] enabledVariantLastPlaylistLoadTimesMs;
private long[] enabledVariantBlacklistTimes;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
private int selectedVariantIndex;
/** /**
* @param isMaster True if this is the master source for the playback. False otherwise. Each * @param type The type of chunk provided by the source. One of {@link #TYPE_DEFAULT} and
* playback must have exactly one master source, which should be the source providing video * {@link #TYPE_VTT}.
* chunks (or audio chunks for audio only playbacks).
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param playlistUrl The playlist URL. * @param playlistUrl The playlist URL.
* @param playlist The hls playlist. * @param playlist The hls playlist.
* @param trackSelector Selects tracks to be exposed by this source.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
* @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If * @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If
* multiple {@link HlsChunkSource}s are used for a single playback, they should all share the * multiple {@link HlsChunkSource}s are used for a single playback, they should all share the
...@@ -174,22 +171,19 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -174,22 +171,19 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* {@link #ADAPTIVE_MODE_NONE}, {@link #ADAPTIVE_MODE_ABRUPT} and * {@link #ADAPTIVE_MODE_NONE}, {@link #ADAPTIVE_MODE_ABRUPT} and
* {@link #ADAPTIVE_MODE_SPLICE}. * {@link #ADAPTIVE_MODE_SPLICE}.
*/ */
public HlsChunkSource(boolean isMaster, DataSource dataSource, String playlistUrl, public HlsChunkSource(int type, DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
HlsPlaylist playlist, HlsTrackSelector trackSelector, BandwidthMeter bandwidthMeter, BandwidthMeter bandwidthMeter, PtsTimestampAdjusterProvider timestampAdjusterProvider,
PtsTimestampAdjusterProvider timestampAdjusterProvider, int adaptiveMode) { int adaptiveMode) {
this(isMaster, dataSource, playlistUrl, playlist, trackSelector, bandwidthMeter, this(type, dataSource, playlistUrl, playlist, bandwidthMeter, timestampAdjusterProvider,
timestampAdjusterProvider, adaptiveMode, DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, adaptiveMode, DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
} }
/** /**
* @param isMaster True if this is the master source for the playback. False otherwise. Each * @param type The type of chunk provided by the source. One of {@link #TYPE_DEFAULT} and
* playback must have exactly one master source, which should be the source providing video * {@link #TYPE_VTT}.
* chunks (or audio chunks for audio only playbacks).
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param playlistUrl The playlist URL. * @param playlistUrl The playlist URL.
* @param playlist The hls playlist. * @param playlist The hls playlist.
* @param trackSelector Selects tracks to be exposed by this source.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
* @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If * @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If
* multiple {@link HlsChunkSource}s are used for a single playback, they should all share the * multiple {@link HlsChunkSource}s are used for a single playback, they should all share the
...@@ -202,13 +196,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -202,13 +196,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered * @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered
* for a switch to a lower quality variant to be considered. * for a switch to a lower quality variant to be considered.
*/ */
public HlsChunkSource(boolean isMaster, DataSource dataSource, String playlistUrl, public HlsChunkSource(int type, DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
HlsPlaylist playlist, HlsTrackSelector trackSelector, BandwidthMeter bandwidthMeter, BandwidthMeter bandwidthMeter, PtsTimestampAdjusterProvider timestampAdjusterProvider,
PtsTimestampAdjusterProvider timestampAdjusterProvider, int adaptiveMode, int adaptiveMode, long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) {
long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) { this.type = type;
this.isMaster = isMaster;
this.dataSource = dataSource; this.dataSource = dataSource;
this.trackSelector = trackSelector;
this.bandwidthMeter = bandwidthMeter; this.bandwidthMeter = bandwidthMeter;
this.timestampAdjusterProvider = timestampAdjusterProvider; this.timestampAdjusterProvider = timestampAdjusterProvider;
this.adaptiveMode = adaptiveMode; this.adaptiveMode = adaptiveMode;
...@@ -216,8 +208,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -216,8 +208,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000; maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000;
baseUri = playlist.baseUri; baseUri = playlist.baseUri;
playlistParser = new HlsPlaylistParser(); playlistParser = new HlsPlaylistParser();
tracks = new ArrayList<>();
if (playlist.type == HlsPlaylist.TYPE_MASTER) { if (playlist.type == HlsPlaylist.TYPE_MASTER) {
masterPlaylist = (HlsMasterPlaylist) playlist; masterPlaylist = (HlsMasterPlaylist) playlist;
} else { } else {
...@@ -250,14 +240,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -250,14 +240,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
public boolean prepare() { public boolean prepare() {
if (!prepareCalled) { if (!prepareCalled) {
prepareCalled = true; prepareCalled = true;
try { processMasterPlaylist(masterPlaylist);
trackSelector.selectTracks(masterPlaylist, this); // TODO[REFACTOR]: Come up with a sane default here.
selectTrack(0); selectTracks(new int[] {0});
} catch (IOException e) {
fatalError = e;
}
} }
return fatalError == null; return true;
} }
/** /**
...@@ -290,50 +277,69 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -290,50 +277,69 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* @return The number of tracks. * @return The number of tracks.
*/ */
public int getTrackCount() { public int getTrackCount() {
return tracks.size(); return exposedVariants.length;
} }
/** /**
* Returns the variant corresponding to the fixed track at the specified index, or null if the * Returns the format of the track at the specified index.
* track at the specified index is adaptive.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @param index The track index. * @param index The track index.
* @return The variant corresponding to the fixed track, or null if the track is adaptive. * @return The format of the track.
*/
public Variant getFixedTrackVariant(int index) {
Variant[] variants = tracks.get(index).variants;
return variants.length == 1 ? variants[0] : null;
}
/**
* Returns the currently selected track index.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The currently selected track index.
*/ */
public int getSelectedTrackIndex() { public Format getTrackFormat(int index) {
return selectedTrackIndex; return exposedVariants[index].format;
} }
/** /**
* Selects a track for use. * Selects a tracks for use.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @param index The track index. * @param tracks The track indices.
*/ */
public void selectTrack(int index) { public void selectTracks(int[] tracks) {
selectedTrackIndex = index; enabledVariants = new Variant[tracks.length];
ExposedTrack selectedTrack = tracks.get(selectedTrackIndex); enabledVariantPlaylists = new HlsMediaPlaylist[enabledVariants.length];
selectedVariantIndex = selectedTrack.defaultVariantIndex; enabledVariantLastPlaylistLoadTimesMs = new long[enabledVariants.length];
variants = selectedTrack.variants; enabledVariantBlacklistTimes = new long[enabledVariants.length];
variantPlaylists = new HlsMediaPlaylist[variants.length]; // Construct and sort the enabled variants.
variantLastPlaylistLoadTimesMs = new long[variants.length]; for (int i = 0; i < tracks.length; i++) {
variantBlacklistTimes = new long[variants.length]; enabledVariants[i] = exposedVariants[tracks[i]];
}
Arrays.sort(enabledVariants, new Comparator<Variant>() {
private final Comparator<Format> formatComparator =
new Format.DecreasingBandwidthComparator();
@Override
public int compare(Variant first, Variant second) {
return formatComparator.compare(first.format, second.format);
}
});
// Determine the initial variant index and maximum video dimensions.
selectedVariantIndex = 0;
int maxWidth = -1;
int maxHeight = -1;
int minOriginalVariantIndex = Integer.MAX_VALUE;
for (int i = 0; i < enabledVariants.length; i++) {
int originalVariantIndex = masterPlaylist.variants.indexOf(enabledVariants[i]);
if (originalVariantIndex < minOriginalVariantIndex) {
minOriginalVariantIndex = originalVariantIndex;
selectedVariantIndex = i;
}
Format variantFormat = enabledVariants[i].format;
maxWidth = Math.max(variantFormat.width, maxWidth);
maxHeight = Math.max(variantFormat.height, maxHeight);
}
if (tracks.length > 1) {
// TODO: We should allow the default values to be passed through the constructor.
// TODO: Print a warning if resolution tags are omitted.
maxWidth = maxWidth > 0 ? maxWidth : 1920;
maxHeight = maxHeight > 0 ? maxHeight : 1080;
} else {
adaptiveMaxWidth = -1;
adaptiveMaxHeight = -1;
}
} }
/** /**
...@@ -342,7 +348,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -342,7 +348,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
*/ */
public void seek() { public void seek() {
if (isMaster) { if (type == TYPE_DEFAULT) {
timestampAdjusterProvider.reset(); timestampAdjusterProvider.reset();
} }
} }
...@@ -377,11 +383,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -377,11 +383,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
} else { } else {
nextVariantIndex = getNextVariantIndex(previousTsChunk, playbackPositionUs); nextVariantIndex = getNextVariantIndex(previousTsChunk, playbackPositionUs);
switchingVariantSpliced = previousTsChunk != null switchingVariantSpliced = previousTsChunk != null
&& !variants[nextVariantIndex].format.equals(previousTsChunk.format) && !enabledVariants[nextVariantIndex].format.equals(previousTsChunk.format)
&& adaptiveMode == ADAPTIVE_MODE_SPLICE; && adaptiveMode == ADAPTIVE_MODE_SPLICE;
} }
HlsMediaPlaylist mediaPlaylist = variantPlaylists[nextVariantIndex]; HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[nextVariantIndex];
if (mediaPlaylist == null) { if (mediaPlaylist == null) {
// We don't have the media playlist for the next variant. Request it now. // We don't have the media playlist for the next variant. Request it now.
out.chunk = newMediaPlaylistChunk(nextVariantIndex); out.chunk = newMediaPlaylistChunk(nextVariantIndex);
...@@ -459,7 +465,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -459,7 +465,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
} }
long endTimeUs = startTimeUs + (long) (segment.durationSecs * C.MICROS_PER_SECOND); long endTimeUs = startTimeUs + (long) (segment.durationSecs * C.MICROS_PER_SECOND);
int trigger = Chunk.TRIGGER_UNSPECIFIED; int trigger = Chunk.TRIGGER_UNSPECIFIED;
Format format = variants[selectedVariantIndex].format; Format format = enabledVariants[selectedVariantIndex].format;
// Configure the extractor that will read the chunk. // Configure the extractor that will read the chunk.
HlsExtractorWrapper extractorWrapper; HlsExtractorWrapper extractorWrapper;
...@@ -477,7 +483,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -477,7 +483,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
switchingVariantSpliced, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE); switchingVariantSpliced, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE);
} else if (lastPathSegment.endsWith(WEBVTT_FILE_EXTENSION) } else if (lastPathSegment.endsWith(WEBVTT_FILE_EXTENSION)
|| lastPathSegment.endsWith(VTT_FILE_EXTENSION)) { || lastPathSegment.endsWith(VTT_FILE_EXTENSION)) {
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(isMaster, PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(false,
segment.discontinuitySequenceNumber, startTimeUs); segment.discontinuitySequenceNumber, startTimeUs);
if (timestampAdjuster == null) { if (timestampAdjuster == null) {
// The master source has yet to instantiate an adjuster for the discontinuity sequence. // The master source has yet to instantiate an adjuster for the discontinuity sequence.
...@@ -492,16 +498,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -492,16 +498,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
|| previousTsChunk.discontinuitySequenceNumber != segment.discontinuitySequenceNumber || previousTsChunk.discontinuitySequenceNumber != segment.discontinuitySequenceNumber
|| !format.equals(previousTsChunk.format)) { || !format.equals(previousTsChunk.format)) {
// MPEG-2 TS segments, but we need a new extractor. // MPEG-2 TS segments, but we need a new extractor.
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(isMaster, PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(true,
segment.discontinuitySequenceNumber, startTimeUs); segment.discontinuitySequenceNumber, startTimeUs);
if (timestampAdjuster == null) { if (timestampAdjuster == null) {
// The master source has yet to instantiate an adjuster for the discontinuity sequence. // The master source has yet to instantiate an adjuster for the discontinuity sequence.
return; return;
} }
ExposedTrack selectedTrack = tracks.get(selectedTrackIndex);
Extractor extractor = new TsExtractor(timestampAdjuster); Extractor extractor = new TsExtractor(timestampAdjuster);
extractorWrapper = new HlsExtractorWrapper(trigger, format, startTimeUs, extractor, extractorWrapper = new HlsExtractorWrapper(trigger, format, startTimeUs, extractor,
switchingVariantSpliced, selectedTrack.adaptiveMaxWidth, selectedTrack.adaptiveMaxHeight); switchingVariantSpliced, adaptiveMaxWidth, adaptiveMaxHeight);
} else { } else {
// MPEG-2 TS segments, and we need to continue using the same extractor. // MPEG-2 TS segments, and we need to continue using the same extractor.
extractorWrapper = previousTsChunk.extractorWrapper; extractorWrapper = previousTsChunk.extractorWrapper;
...@@ -558,8 +563,8 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -558,8 +563,8 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
EncryptionKeyChunk encryptionChunk = (EncryptionKeyChunk) chunk; EncryptionKeyChunk encryptionChunk = (EncryptionKeyChunk) chunk;
variantIndex = encryptionChunk.variantIndex; variantIndex = encryptionChunk.variantIndex;
} }
boolean alreadyBlacklisted = variantBlacklistTimes[variantIndex] != 0; boolean alreadyBlacklisted = enabledVariantBlacklistTimes[variantIndex] != 0;
variantBlacklistTimes[variantIndex] = SystemClock.elapsedRealtime(); enabledVariantBlacklistTimes[variantIndex] = SystemClock.elapsedRealtime();
if (alreadyBlacklisted) { if (alreadyBlacklisted) {
// The playlist was already blacklisted. // The playlist was already blacklisted.
Log.w(TAG, "Already blacklisted variant (" + responseCode + "): " Log.w(TAG, "Already blacklisted variant (" + responseCode + "): "
...@@ -574,7 +579,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -574,7 +579,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
// This was the last non-blacklisted playlist. Don't blacklist it. // This was the last non-blacklisted playlist. Don't blacklist it.
Log.w(TAG, "Final variant not blacklisted (" + responseCode + "): " Log.w(TAG, "Final variant not blacklisted (" + responseCode + "): "
+ chunk.dataSpec.uri); + chunk.dataSpec.uri);
variantBlacklistTimes[variantIndex] = 0; enabledVariantBlacklistTimes[variantIndex] = 0;
return false; return false;
} }
} }
...@@ -582,52 +587,68 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -582,52 +587,68 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
return false; return false;
} }
// HlsTrackSelector.Output implementation. // Private methods.
@Override private void processMasterPlaylist(HlsMasterPlaylist playlist) {
public void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants) { if (type == TYPE_VTT) {
Arrays.sort(variants, new Comparator<Variant>() { List<Variant> subtitleVariants = playlist.subtitles;
private final Comparator<Format> formatComparator = if (subtitleVariants != null) {
new Format.DecreasingBandwidthComparator(); exposedVariants = new Variant[subtitleVariants.size()];
@Override subtitleVariants.toArray(exposedVariants);
public int compare(Variant first, Variant second) { } else {
return formatComparator.compare(first.format, second.format); exposedVariants = new Variant[0];
} }
}); return;
}
int defaultVariantIndex = 0;
int maxWidth = -1;
int maxHeight = -1;
int minOriginalVariantIndex = Integer.MAX_VALUE; // Type is TYPE_DEFAULT.
for (int i = 0; i < variants.length; i++) { List<Variant> enabledVariantList = playlist.variants;
int originalVariantIndex = playlist.variants.indexOf(variants[i]); ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
if (originalVariantIndex < minOriginalVariantIndex) { ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
minOriginalVariantIndex = originalVariantIndex; for (int i = 0; i < enabledVariantList.size(); i++) {
defaultVariantIndex = i; Variant variant = enabledVariantList.get(i);
if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
definiteVideoVariants.add(variant);
} else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
definiteAudioOnlyVariants.add(variant);
} }
Format variantFormat = variants[i].format;
maxWidth = Math.max(variantFormat.width, maxWidth);
maxHeight = Math.max(variantFormat.height, maxHeight);
} }
// TODO: We should allow the default values to be passed through the constructor.
// TODO: Print a warning if resolution tags are omitted.
maxWidth = maxWidth > 0 ? maxWidth : 1920;
maxHeight = maxHeight > 0 ? maxHeight : 1080;
tracks.add(new ExposedTrack(variants, defaultVariantIndex, maxWidth, maxHeight));
}
@Override if (!definiteVideoVariants.isEmpty()) {
public void fixedTrack(HlsMasterPlaylist playlist, Variant variant) { // We've identified some variants as definitely containing video. Assume variants within the
tracks.add(new ExposedTrack(variant)); // master playlist are marked consistently, and hence that we have the full set. Filter out
// any other variants, which are likely to be audio only.
enabledVariantList = definiteVideoVariants;
} else if (definiteAudioOnlyVariants.size() < enabledVariantList.size()) {
// We've identified some variants, but not all, as being audio only. Filter them out to leave
// the remaining variants, which are likely to contain video.
enabledVariantList.removeAll(definiteAudioOnlyVariants);
} else {
// Leave the enabled variants unchanged. They're likely either all video or all audio.
}
exposedVariants = new Variant[enabledVariantList.size()];
enabledVariantList.toArray(exposedVariants);
} }
// Private methods. private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
String codecs = variant.format.codecs;
if (TextUtils.isEmpty(codecs)) {
return false;
}
String[] codecArray = codecs.split("(\\s*,\\s*)|(\\s*$)");
for (int i = 0; i < codecArray.length; i++) {
if (codecArray[i].startsWith(prefix)) {
return true;
}
}
return false;
}
private int getNextVariantIndex(TsChunk previousTsChunk, long playbackPositionUs) { private int getNextVariantIndex(TsChunk previousTsChunk, long playbackPositionUs) {
clearStaleBlacklistedVariants(); clearStaleBlacklistedVariants();
long bitrateEstimate = bandwidthMeter.getBitrateEstimate(); long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
if (variantBlacklistTimes[selectedVariantIndex] != 0) { if (enabledVariantBlacklistTimes[selectedVariantIndex] != 0) {
// The current variant has been blacklisted, so we have no choice but to re-evaluate. // The current variant has been blacklisted, so we have no choice but to re-evaluate.
return getVariantIndexForBandwidth(bitrateEstimate); return getVariantIndexForBandwidth(bitrateEstimate);
} }
...@@ -649,7 +670,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -649,7 +670,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
long bufferedPositionUs = adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs long bufferedPositionUs = adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs
: previousTsChunk.endTimeUs; : previousTsChunk.endTimeUs;
long bufferedUs = bufferedPositionUs - playbackPositionUs; long bufferedUs = bufferedPositionUs - playbackPositionUs;
if (variantBlacklistTimes[selectedVariantIndex] != 0 if (enabledVariantBlacklistTimes[selectedVariantIndex] != 0
|| (idealIndex > selectedVariantIndex && bufferedUs < maxBufferDurationToSwitchDownUs) || (idealIndex > selectedVariantIndex && bufferedUs < maxBufferDurationToSwitchDownUs)
|| (idealIndex < selectedVariantIndex && bufferedUs > minBufferDurationToSwitchUpUs)) { || (idealIndex < selectedVariantIndex && bufferedUs > minBufferDurationToSwitchUpUs)) {
// Switch variant. // Switch variant.
...@@ -666,9 +687,9 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -666,9 +687,9 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
} }
int effectiveBitrate = (int) (bitrateEstimate * BANDWIDTH_FRACTION); int effectiveBitrate = (int) (bitrateEstimate * BANDWIDTH_FRACTION);
int lowestQualityEnabledVariantIndex = -1; int lowestQualityEnabledVariantIndex = -1;
for (int i = 0; i < variants.length; i++) { for (int i = 0; i < enabledVariants.length; i++) {
if (variantBlacklistTimes[i] == 0) { if (enabledVariantBlacklistTimes[i] == 0) {
if (variants[i].format.bitrate <= effectiveBitrate) { if (enabledVariants[i].format.bitrate <= effectiveBitrate) {
return i; return i;
} }
lowestQualityEnabledVariantIndex = i; lowestQualityEnabledVariantIndex = i;
...@@ -681,21 +702,21 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -681,21 +702,21 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private boolean shouldRerequestLiveMediaPlaylist(int nextVariantIndex) { private boolean shouldRerequestLiveMediaPlaylist(int nextVariantIndex) {
// Don't re-request media playlist more often than one-half of the target duration. // Don't re-request media playlist more often than one-half of the target duration.
HlsMediaPlaylist mediaPlaylist = variantPlaylists[nextVariantIndex]; HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[nextVariantIndex];
long timeSinceLastMediaPlaylistLoadMs = long timeSinceLastMediaPlaylistLoadMs =
SystemClock.elapsedRealtime() - variantLastPlaylistLoadTimesMs[nextVariantIndex]; SystemClock.elapsedRealtime() - enabledVariantLastPlaylistLoadTimesMs[nextVariantIndex];
return timeSinceLastMediaPlaylistLoadMs >= (mediaPlaylist.targetDurationSecs * 1000) / 2; return timeSinceLastMediaPlaylistLoadMs >= (mediaPlaylist.targetDurationSecs * 1000) / 2;
} }
private int getLiveStartChunkMediaSequence(int variantIndex) { private int getLiveStartChunkMediaSequence(int variantIndex) {
// For live start playback from the third chunk from the end. // For live start playback from the third chunk from the end.
HlsMediaPlaylist mediaPlaylist = variantPlaylists[variantIndex]; HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[variantIndex];
int chunkIndex = mediaPlaylist.segments.size() > 3 ? mediaPlaylist.segments.size() - 3 : 0; int chunkIndex = mediaPlaylist.segments.size() > 3 ? mediaPlaylist.segments.size() - 3 : 0;
return chunkIndex + mediaPlaylist.mediaSequence; return chunkIndex + mediaPlaylist.mediaSequence;
} }
private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) { private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) {
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, variants[variantIndex].url); Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, enabledVariants[variantIndex].url);
DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null, DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null,
DataSpec.FLAG_ALLOW_GZIP); DataSpec.FLAG_ALLOW_GZIP);
return new MediaPlaylistChunk(dataSource, dataSpec, scratchSpace, playlistParser, variantIndex, return new MediaPlaylistChunk(dataSource, dataSpec, scratchSpace, playlistParser, variantIndex,
...@@ -735,15 +756,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -735,15 +756,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
} }
private void setMediaPlaylist(int variantIndex, HlsMediaPlaylist mediaPlaylist) { private void setMediaPlaylist(int variantIndex, HlsMediaPlaylist mediaPlaylist) {
variantLastPlaylistLoadTimesMs[variantIndex] = SystemClock.elapsedRealtime(); enabledVariantLastPlaylistLoadTimesMs[variantIndex] = SystemClock.elapsedRealtime();
variantPlaylists[variantIndex] = mediaPlaylist; enabledVariantPlaylists[variantIndex] = mediaPlaylist;
live |= mediaPlaylist.live; live |= mediaPlaylist.live;
durationUs = live ? C.UNKNOWN_TIME_US : mediaPlaylist.durationUs; durationUs = live ? C.UNKNOWN_TIME_US : mediaPlaylist.durationUs;
} }
private boolean allVariantsBlacklisted() { private boolean allVariantsBlacklisted() {
for (int i = 0; i < variantBlacklistTimes.length; i++) { for (int i = 0; i < enabledVariantBlacklistTimes.length; i++) {
if (variantBlacklistTimes[i] == 0) { if (enabledVariantBlacklistTimes[i] == 0) {
return false; return false;
} }
} }
...@@ -752,17 +773,17 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -752,17 +773,17 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private void clearStaleBlacklistedVariants() { private void clearStaleBlacklistedVariants() {
long currentTime = SystemClock.elapsedRealtime(); long currentTime = SystemClock.elapsedRealtime();
for (int i = 0; i < variantBlacklistTimes.length; i++) { for (int i = 0; i < enabledVariantBlacklistTimes.length; i++) {
if (variantBlacklistTimes[i] != 0 if (enabledVariantBlacklistTimes[i] != 0
&& currentTime - variantBlacklistTimes[i] > DEFAULT_PLAYLIST_BLACKLIST_MS) { && currentTime - enabledVariantBlacklistTimes[i] > DEFAULT_PLAYLIST_BLACKLIST_MS) {
variantBlacklistTimes[i] = 0; enabledVariantBlacklistTimes[i] = 0;
} }
} }
} }
private int getVariantIndex(Format format) { private int getVariantIndex(Format format) {
for (int i = 0; i < variants.length; i++) { for (int i = 0; i < enabledVariants.length; i++) {
if (variants[i].format.equals(format)) { if (enabledVariants[i].format.equals(format)) {
return i; return i;
} }
} }
...@@ -772,31 +793,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output { ...@@ -772,31 +793,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
// Private classes. // Private classes.
private static final class ExposedTrack {
private final Variant[] variants;
private final int defaultVariantIndex;
private final int adaptiveMaxWidth;
private final int adaptiveMaxHeight;
public ExposedTrack(Variant fixedVariant) {
this.variants = new Variant[] {fixedVariant};
this.defaultVariantIndex = 0;
this.adaptiveMaxWidth = MediaFormat.NO_VALUE;
this.adaptiveMaxHeight = MediaFormat.NO_VALUE;
}
public ExposedTrack(Variant[] adaptiveVariants, int defaultVariantIndex, int maxWidth,
int maxHeight) {
this.variants = adaptiveVariants;
this.defaultVariantIndex = defaultVariantIndex;
this.adaptiveMaxWidth = maxWidth;
this.adaptiveMaxHeight = maxHeight;
}
}
private static final class MediaPlaylistChunk extends DataChunk { private static final class MediaPlaylistChunk extends DataChunk {
public final int variantIndex; public final int variantIndex;
......
...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat; ...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder; import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource; import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener; import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
import com.google.android.exoplayer.chunk.Chunk; import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkOperationHolder; import com.google.android.exoplayer.chunk.ChunkOperationHolder;
...@@ -55,9 +56,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -55,9 +56,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private static final long NO_RESET_PENDING = Long.MIN_VALUE; private static final long NO_RESET_PENDING = Long.MIN_VALUE;
private static final int PRIMARY_TYPE_NONE = 0; private static final int PRIMARY_TYPE_NONE = 0;
private static final int PRIMARY_TYPE_TEXT = 1; private static final int PRIMARY_TYPE_AUDIO = 1;
private static final int PRIMARY_TYPE_AUDIO = 2; private static final int PRIMARY_TYPE_VIDEO = 2;
private static final int PRIMARY_TYPE_VIDEO = 3;
private final HlsChunkSource chunkSource; private final HlsChunkSource chunkSource;
private final LinkedList<HlsExtractorWrapper> extractors; private final LinkedList<HlsExtractorWrapper> extractors;
...@@ -72,24 +72,19 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -72,24 +72,19 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private boolean prepared; private boolean prepared;
private boolean loadControlRegistered; private boolean loadControlRegistered;
private int trackCount;
private int enabledTrackCount; private int enabledTrackCount;
private Format downstreamFormat; private Format downstreamFormat;
// Tracks are complicated in HLS. See documentation of buildTracks for details. // Tracks are complicated in HLS. See documentation of buildTracks for details.
// Indexed by track (as exposed by this source). // Indexed by track (as exposed by this source).
private MediaFormat[] trackFormats; private TrackGroup[] trackGroups;
private boolean[] trackEnabledStates; private int primaryTrackGroupIndex;
private int[] primarySelectedTracks;
// Indexed by group.
private boolean[] groupEnabledStates;
private boolean[] pendingResets; private boolean[] pendingResets;
private MediaFormat[] downstreamMediaFormats; private MediaFormat[] downstreamMediaFormats;
// Maps track index (as exposed by this source) to the corresponding chunk source track index for
// primary tracks, or to -1 otherwise.
private int[] chunkSourceTrackIndices;
// Maps track index (as exposed by this source) to the corresponding extractor track index.
private int[] extractorTrackIndices;
// Indexed by extractor track index.
private boolean[] extractorTrackEnabledStates;
private long downstreamPositionUs; private long downstreamPositionUs;
private long lastSeekPositionUs; private long lastSeekPositionUs;
...@@ -186,23 +181,22 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -186,23 +181,22 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
Assertions.checkState(prepared); return trackGroups.length;
return trackCount;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
return trackFormats[track]; return trackGroups[group];
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
setTrackEnabledState(track, true); setTrackGroupEnabledState(group, true);
downstreamMediaFormats[track] = null; downstreamMediaFormats[group] = null;
pendingResets[track] = false; pendingResets[group] = false;
downstreamFormat = null; downstreamFormat = null;
boolean wasLoadControlRegistered = loadControlRegistered; boolean wasLoadControlRegistered = loadControlRegistered;
if (!loadControlRegistered) { if (!loadControlRegistered) {
...@@ -211,13 +205,13 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -211,13 +205,13 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
} }
// Treat enabling of a live stream as occurring at t=0 in both of the blocks below. // Treat enabling of a live stream as occurring at t=0 in both of the blocks below.
positionUs = chunkSource.isLive() ? 0 : positionUs; positionUs = chunkSource.isLive() ? 0 : positionUs;
int chunkSourceTrack = chunkSourceTrackIndices[track]; if (group == primaryTrackGroupIndex && !Arrays.equals(tracks, primarySelectedTracks)) {
if (chunkSourceTrack != -1 && chunkSourceTrack != chunkSource.getSelectedTrackIndex()) {
// This is a primary track whose corresponding chunk source track is different to the one // This is a primary track whose corresponding chunk source track is different to the one
// currently selected. We need to change the selection and restart. Since other exposed tracks // currently selected. We need to change the selection and restart. Since other exposed tracks
// may be enabled too, we need to implement the restart as a seek so that all downstream // may be enabled too, we need to implement the restart as a seek so that all downstream
// renderers receive a discontinuity event. // renderers receive a discontinuity event.
chunkSource.selectTrack(chunkSourceTrack); chunkSource.selectTracks(tracks);
primarySelectedTracks = tracks;
seekToInternal(positionUs); seekToInternal(positionUs);
} else if (enabledTrackCount == 1) { } else if (enabledTrackCount == 1) {
lastSeekPositionUs = positionUs; lastSeekPositionUs = positionUs;
...@@ -232,12 +226,12 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -232,12 +226,12 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
restartFrom(positionUs); restartFrom(positionUs);
} }
} }
return new TrackStreamImpl(track); return new TrackStreamImpl(group);
} }
/* package */ void disable(int track) { /* package */ void disable(int group) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
setTrackEnabledState(track, false); setTrackGroupEnabledState(group, false);
if (enabledTrackCount == 0) { if (enabledTrackCount == 0) {
chunkSource.reset(); chunkSource.reset();
downstreamPositionUs = Long.MIN_VALUE; downstreamPositionUs = Long.MIN_VALUE;
...@@ -267,8 +261,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -267,8 +261,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
maybeStartLoading(); maybeStartLoading();
} }
/* package */ boolean isReady(int track) { /* package */ boolean isReady(int group) {
Assertions.checkState(trackEnabledStates[track]); Assertions.checkState(groupEnabledStates[group]);
if (loadingFinished) { if (loadingFinished) {
return true; return true;
} }
...@@ -280,26 +274,26 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -280,26 +274,26 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) { if (!extractor.isPrepared()) {
break; break;
} }
int extractorTrack = extractorTrackIndices[track]; if (extractor.hasSamples(group)) {
if (extractor.hasSamples(extractorTrack)) {
return true; return true;
} }
} }
return false; return false;
} }
/* package */ long readReset(int track) { /* package */ long readReset(int group) {
if (pendingResets[track]) { if (pendingResets[group]) {
pendingResets[track] = false; pendingResets[group] = false;
return lastSeekPositionUs; return lastSeekPositionUs;
} }
return TrackStream.NO_RESET; return TrackStream.NO_RESET;
} }
/* package */ int readData(int track, MediaFormatHolder formatHolder, SampleHolder sampleHolder) { /* package */ int readData(int group, MediaFormatHolder formatHolder,
SampleHolder sampleHolder) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
if (pendingResets[track] || isPendingReset()) { if (pendingResets[group] || isPendingReset()) {
return TrackStream.NOTHING_READ; return TrackStream.NOTHING_READ;
} }
...@@ -320,9 +314,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -320,9 +314,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
extractor.configureSpliceTo(extractors.get(1)); extractor.configureSpliceTo(extractors.get(1));
} }
int extractorTrack = extractorTrackIndices[track];
int extractorIndex = 0; int extractorIndex = 0;
while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(extractorTrack)) { while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(group)) {
// We're finished reading from the extractor for this particular track, so advance to the // We're finished reading from the extractor for this particular track, so advance to the
// next one for the current read. // next one for the current read.
extractor = extractors.get(++extractorIndex); extractor = extractors.get(++extractorIndex);
...@@ -331,14 +324,14 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -331,14 +324,14 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
} }
} }
MediaFormat mediaFormat = extractor.getMediaFormat(extractorTrack); MediaFormat mediaFormat = extractor.getMediaFormat(group);
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[track])) { if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[group])) {
formatHolder.format = mediaFormat; formatHolder.format = mediaFormat;
downstreamMediaFormats[track] = mediaFormat; downstreamMediaFormats[group] = mediaFormat;
return TrackStream.FORMAT_READ; return TrackStream.FORMAT_READ;
} }
if (extractor.getSample(extractorTrack, sampleHolder)) { if (extractor.getSample(group, sampleHolder)) {
boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs; boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0; sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
return TrackStream.SAMPLE_READ; return TrackStream.SAMPLE_READ;
...@@ -499,8 +492,6 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -499,8 +492,6 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
trackType = PRIMARY_TYPE_VIDEO; trackType = PRIMARY_TYPE_VIDEO;
} else if (MimeTypes.isAudio(mimeType)) { } else if (MimeTypes.isAudio(mimeType)) {
trackType = PRIMARY_TYPE_AUDIO; trackType = PRIMARY_TYPE_AUDIO;
} else if (MimeTypes.isText(mimeType)) {
trackType = PRIMARY_TYPE_TEXT;
} else { } else {
trackType = PRIMARY_TYPE_NONE; trackType = PRIMARY_TYPE_NONE;
} }
...@@ -516,54 +507,39 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -516,54 +507,39 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
// Calculate the number of tracks that will be exposed. // Calculate the number of tracks that will be exposed.
int chunkSourceTrackCount = chunkSource.getTrackCount(); int chunkSourceTrackCount = chunkSource.getTrackCount();
boolean expandPrimaryExtractorTrack = primaryExtractorTrackIndex != -1;
trackCount = extractorTrackCount;
if (expandPrimaryExtractorTrack) {
trackCount += chunkSourceTrackCount - 1;
}
// Instantiate the necessary internal data-structures. // Instantiate the necessary internal data-structures.
trackFormats = new MediaFormat[trackCount]; primaryTrackGroupIndex = -1;
trackEnabledStates = new boolean[trackCount]; trackGroups = new TrackGroup[extractorTrackCount];
pendingResets = new boolean[trackCount]; groupEnabledStates = new boolean[extractorTrackCount];
downstreamMediaFormats = new MediaFormat[trackCount]; pendingResets = new boolean[extractorTrackCount];
chunkSourceTrackIndices = new int[trackCount]; downstreamMediaFormats = new MediaFormat[extractorTrackCount];
extractorTrackIndices = new int[trackCount];
extractorTrackEnabledStates = new boolean[extractorTrackCount]; // Construct the set of exposed track groups.
// Construct the set of exposed tracks.
long durationUs = chunkSource.getDurationUs();
int trackIndex = 0;
for (int i = 0; i < extractorTrackCount; i++) { for (int i = 0; i < extractorTrackCount; i++) {
MediaFormat format = extractor.getMediaFormat(i).copyWithDurationUs(durationUs); MediaFormat format = extractor.getMediaFormat(i);
if (i == primaryExtractorTrackIndex) { if (i == primaryExtractorTrackIndex) {
MediaFormat[] formats = new MediaFormat[chunkSourceTrackCount];
for (int j = 0; j < chunkSourceTrackCount; j++) { for (int j = 0; j < chunkSourceTrackCount; j++) {
extractorTrackIndices[trackIndex] = i; formats[j] = copyWithFixedTrackInfo(format, chunkSource.getTrackFormat(j));
chunkSourceTrackIndices[trackIndex] = j;
Variant fixedTrackVariant = chunkSource.getFixedTrackVariant(j);
trackFormats[trackIndex++] = fixedTrackVariant == null ? format.copyAsAdaptive(null)
: copyWithFixedTrackInfo(format, fixedTrackVariant.format);
} }
trackGroups[i] = new TrackGroup(true, formats);
primaryTrackGroupIndex = i;
} else { } else {
extractorTrackIndices[trackIndex] = i; trackGroups[i] = new TrackGroup(format);
chunkSourceTrackIndices[trackIndex] = -1;
trackFormats[trackIndex++] = format;
} }
} }
} }
/** /**
* Enables or disables the track at a given index. * Enables or disables a specified track group.
* *
* @param track The index of the track. * @param group The index of the track group.
* @param enabledState True if the track is being enabled, or false if it's being disabled. * @param enabledState True if the group is being enabled, or false if it's being disabled.
*/ */
private void setTrackEnabledState(int track, boolean enabledState) { private void setTrackGroupEnabledState(int group, boolean enabledState) {
Assertions.checkState(trackEnabledStates[track] != enabledState); Assertions.checkState(groupEnabledStates[group] != enabledState);
int extractorTrack = extractorTrackIndices[track]; groupEnabledStates[group] = enabledState;
Assertions.checkState(extractorTrackEnabledStates[extractorTrack] != enabledState);
trackEnabledStates[track] = enabledState;
extractorTrackEnabledStates[extractorTrack] = enabledState;
enabledTrackCount = enabledTrackCount + (enabledState ? 1 : -1); enabledTrackCount = enabledTrackCount + (enabledState ? 1 : -1);
} }
...@@ -619,8 +595,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -619,8 +595,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) { if (!extractor.isPrepared()) {
return; return;
} }
for (int i = 0; i < extractorTrackEnabledStates.length; i++) { for (int i = 0; i < groupEnabledStates.length; i++) {
if (!extractorTrackEnabledStates[i]) { if (!groupEnabledStates[i]) {
extractor.discardUntil(i, timeUs); extractor.discardUntil(i, timeUs);
} }
} }
...@@ -630,8 +606,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -630,8 +606,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) { if (!extractor.isPrepared()) {
return false; return false;
} }
for (int i = 0; i < extractorTrackEnabledStates.length; i++) { for (int i = 0; i < groupEnabledStates.length; i++) {
if (extractorTrackEnabledStates[i] && extractor.hasSamples(i)) { if (groupEnabledStates[i] && extractor.hasSamples(i)) {
return true; return true;
} }
} }
...@@ -819,15 +795,15 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -819,15 +795,15 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private final class TrackStreamImpl implements TrackStream { private final class TrackStreamImpl implements TrackStream {
private final int track; private final int group;
public TrackStreamImpl(int track) { public TrackStreamImpl(int group) {
this.track = track; this.group = group;
} }
@Override @Override
public boolean isReady() { public boolean isReady() {
return HlsSampleSource.this.isReady(track); return HlsSampleSource.this.isReady(group);
} }
@Override @Override
...@@ -837,17 +813,17 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback { ...@@ -837,17 +813,17 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
@Override @Override
public long readReset() { public long readReset() {
return HlsSampleSource.this.readReset(track); return HlsSampleSource.this.readReset(group);
} }
@Override @Override
public int readData(MediaFormatHolder formatHolder, SampleHolder sampleHolder) { public int readData(MediaFormatHolder formatHolder, SampleHolder sampleHolder) {
return HlsSampleSource.this.readData(track, formatHolder, sampleHolder); return HlsSampleSource.this.readData(group, formatHolder, sampleHolder);
} }
@Override @Override
public void disable() { public void disable() {
HlsSampleSource.this.disable(track); HlsSampleSource.this.disable(group);
} }
} }
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import java.io.IOException;
/**
* Specifies a track selection from an {@link HlsMasterPlaylist}.
*/
public interface HlsTrackSelector {
/**
* Defines a selector output.
*/
interface Output {
/**
* Outputs an adaptive track, covering the specified representations in the specified
* adaptation set.
*
* @param playlist The master playlist being processed.
* @param variants The variants to use for the adaptive track.
*/
void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants);
/**
* Outputs an fixed track corresponding to the specified representation in the specified
* adaptation set.
*
* @param playlist The master playlist being processed.
* @param variant The variant to use for the track.
*/
void fixedTrack(HlsMasterPlaylist playlist, Variant variant);
}
/**
* Outputs a track selection for a given period.
*
* @param playlist The master playlist to process.
* @param output The output to receive tracks.
* @throws IOException If an error occurs processing the period.
*/
void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException;
}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
import java.util.Arrays;
/**
* A default {@link SmoothStreamingTrackSelector} implementation.
*/
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultSmoothStreamingTrackSelector implements SmoothStreamingTrackSelector {
private final int streamElementType;
private final Context context;
private final boolean filterVideoRepresentations;
private final boolean filterProtectedHdContent;
/**
* @param context A context. May be null if {@code filterVideoRepresentations == false}.
* @param filterVideoRepresentations Whether video representations should be filtered according to
* the capabilities of the device. It is strongly recommended to set this to {@code true},
* unless the application has already verified that all representations are playable.
* @param filterProtectedHdContent Whether video representations that are both drm protected and
* high definition should be filtered when tracks are built. If
* {@code filterVideoRepresentations == false} then this parameter is ignored.
*/
public static DefaultSmoothStreamingTrackSelector newVideoInstance(Context context,
boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_VIDEO, context,
filterVideoRepresentations, filterProtectedHdContent);
}
public static DefaultSmoothStreamingTrackSelector newAudioInstance() {
return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_AUDIO, null, false, false);
}
public static DefaultSmoothStreamingTrackSelector newTextInstance() {
return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_TEXT, null, false, false);
}
private DefaultSmoothStreamingTrackSelector(int streamElementType, Context context,
boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
this.context = context;
this.streamElementType = streamElementType;
this.filterVideoRepresentations = filterVideoRepresentations;
this.filterProtectedHdContent = filterProtectedHdContent;
}
@Override
public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
for (int i = 0; i < manifest.streamElements.length; i++) {
TrackElement[] tracks = manifest.streamElements[i].tracks;
if (manifest.streamElements[i].type == streamElementType) {
if (streamElementType == StreamElement.TYPE_VIDEO) {
int[] trackIndices;
if (filterVideoRepresentations) {
trackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, Arrays.asList(tracks), null,
filterProtectedHdContent && manifest.protectionElement != null);
} else {
trackIndices = Util.firstIntegersArray(tracks.length);
}
int trackCount = trackIndices.length;
if (trackCount > 1) {
output.adaptiveTrack(manifest, i, trackIndices);
}
for (int j = 0; j < trackCount; j++) {
output.fixedTrack(manifest, i, trackIndices[j]);
}
} else {
for (int j = 0; j < tracks.length; j++) {
output.fixedTrack(manifest, i, j);
}
}
}
}
}
}
...@@ -18,6 +18,7 @@ package com.google.android.exoplayer.smoothstreaming; ...@@ -18,6 +18,7 @@ package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.BehindLiveWindowException; import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.C; import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.Chunk; import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper; import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
import com.google.android.exoplayer.chunk.ChunkOperationHolder; import com.google.android.exoplayer.chunk.ChunkOperationHolder;
...@@ -49,7 +50,6 @@ import android.util.Base64; ...@@ -49,7 +50,6 @@ import android.util.Base64;
import android.util.SparseArray; import android.util.SparseArray;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
...@@ -57,13 +57,13 @@ import java.util.List; ...@@ -57,13 +57,13 @@ import java.util.List;
/** /**
* An {@link ChunkSource} for SmoothStreaming. * An {@link ChunkSource} for SmoothStreaming.
*/ */
public class SmoothStreamingChunkSource implements ChunkSource, // TODO[REFACTOR]: Handle multiple stream elements of the same type (at a higher level).
SmoothStreamingTrackSelector.Output { public class SmoothStreamingChunkSource implements ChunkSource {
private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000; private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000;
private static final int INITIALIZATION_VECTOR_SIZE = 8; private static final int INITIALIZATION_VECTOR_SIZE = 8;
private final SmoothStreamingTrackSelector trackSelector; private final int streamElementType;
private final DataSource dataSource; private final DataSource dataSource;
private final Evaluation evaluation; private final Evaluation evaluation;
private final long liveEdgeLatencyUs; private final long liveEdgeLatencyUs;
...@@ -73,20 +73,26 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -73,20 +73,26 @@ public class SmoothStreamingChunkSource implements ChunkSource,
private final FormatEvaluator adaptiveFormatEvaluator; private final FormatEvaluator adaptiveFormatEvaluator;
private final boolean live; private final boolean live;
// The tracks exposed by this source.
private final ArrayList<ExposedTrack> tracks;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats;
private boolean prepareCalled; private boolean prepareCalled;
private SmoothStreamingManifest currentManifest; private SmoothStreamingManifest currentManifest;
private int currentManifestChunkOffset; private int currentManifestChunkOffset;
private boolean needManifestRefresh; private boolean needManifestRefresh;
private ExposedTrack enabledTrack;
private IOException fatalError; private IOException fatalError;
// Properties of exposed tracks.
private int elementIndex;
private TrackGroup trackGroup;
private Format[] trackFormats;
// Properties of enabled tracks.
private Format[] enabledFormats;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats;
/** /**
* Constructor to use for live streaming. * Constructor to use for live streaming.
* <p> * <p>
...@@ -95,7 +101,9 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -95,7 +101,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param trackSelector Selects tracks from the manifest to be exposed by this source. * @param streamElementType The type of stream element exposed by this source. One of
* {@link StreamElement#TYPE_VIDEO}, {@link StreamElement#TYPE_AUDIO} and
* {@link StreamElement#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
...@@ -105,9 +113,9 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -105,9 +113,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
* Hence a small value may increase the probability of rebuffering and playback failures. * Hence a small value may increase the probability of rebuffering and playback failures.
*/ */
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource, int streamElementType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) { long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource, this(manifestFetcher, manifestFetcher.getManifest(), streamElementType, dataSource,
adaptiveFormatEvaluator, liveEdgeLatencyMs); adaptiveFormatEvaluator, liveEdgeLatencyMs);
} }
...@@ -115,27 +123,27 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -115,27 +123,27 @@ public class SmoothStreamingChunkSource implements ChunkSource,
* Constructor to use for fixed duration content. * Constructor to use for fixed duration content.
* *
* @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}. * @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
* @param trackSelector Selects tracks from the manifest to be exposed by this source. * @param streamElementType The type of stream element exposed by this source. One of
* {@link StreamElement#TYPE_VIDEO}, {@link StreamElement#TYPE_AUDIO} and
* {@link StreamElement#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
*/ */
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementType,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
FormatEvaluator adaptiveFormatEvaluator) { this(null, manifest, streamElementType, dataSource, adaptiveFormatEvaluator, 0);
this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, 0);
} }
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingManifest initialManifest, SmoothStreamingTrackSelector trackSelector, SmoothStreamingManifest initialManifest, int streamElementType, DataSource dataSource,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) { FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
this.manifestFetcher = manifestFetcher; this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest; this.currentManifest = initialManifest;
this.trackSelector = trackSelector; this.streamElementType = streamElementType;
this.dataSource = dataSource; this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator; this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
evaluation = new Evaluation(); evaluation = new Evaluation();
tracks = new ArrayList<>();
extractorWrappers = new SparseArray<>(); extractorWrappers = new SparseArray<>();
mediaFormats = new SparseArray<>(); mediaFormats = new SparseArray<>();
live = initialManifest.isLive; live = initialManifest.isLive;
...@@ -168,31 +176,35 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -168,31 +176,35 @@ public class SmoothStreamingChunkSource implements ChunkSource,
@Override @Override
public boolean prepare() { public boolean prepare() {
if (!prepareCalled) { if (!prepareCalled) {
selectTracks(currentManifest);
prepareCalled = true; prepareCalled = true;
try {
trackSelector.selectTracks(currentManifest, this);
} catch (IOException e) {
fatalError = e;
}
} }
return fatalError == null; return true;
}
@Override
public int getTrackCount() {
return tracks.size();
} }
@Override @Override
public final MediaFormat getFormat(int track) { public final TrackGroup getTracks() {
return tracks.get(track).trackFormat; return trackGroup;
} }
@Override @Override
public void enable(int track) { public void enable(int[] tracks) {
enabledTrack = tracks.get(track); int maxWidth = -1;
if (enabledTrack.isAdaptive()) { int maxHeight = -1;
enabledFormats = new Format[tracks.length];
for (int i = 0; i < tracks.length; i++) {
enabledFormats[i] = trackFormats[tracks[i]];
maxWidth = Math.max(enabledFormats[i].width, maxWidth);
maxHeight = Math.max(enabledFormats[i].height, maxHeight);
}
Arrays.sort(enabledFormats, new DecreasingBandwidthComparator());
if (enabledFormats.length > 1) {
adaptiveMaxWidth = maxWidth;
adaptiveMaxHeight = maxHeight;
adaptiveFormatEvaluator.enable(); adaptiveFormatEvaluator.enable();
} else {
adaptiveMaxWidth = -1;
adaptiveMaxHeight = -1;
} }
if (manifestFetcher != null) { if (manifestFetcher != null) {
manifestFetcher.enable(); manifestFetcher.enable();
...@@ -207,9 +219,9 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -207,9 +219,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
SmoothStreamingManifest newManifest = manifestFetcher.getManifest(); SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) { if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = currentManifest.streamElements[enabledTrack.elementIndex]; StreamElement currentElement = currentManifest.streamElements[elementIndex];
int currentElementChunkCount = currentElement.chunkCount; int currentElementChunkCount = currentElement.chunkCount;
StreamElement newElement = newManifest.streamElements[enabledTrack.elementIndex]; StreamElement newElement = newManifest.streamElements[elementIndex];
if (currentElementChunkCount == 0 || newElement.chunkCount == 0) { if (currentElementChunkCount == 0 || newElement.chunkCount == 0) {
// There's no overlap between the old and new elements because at least one is empty. // There's no overlap between the old and new elements because at least one is empty.
currentManifestChunkOffset += currentElementChunkCount; currentManifestChunkOffset += currentElementChunkCount;
...@@ -244,11 +256,10 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -244,11 +256,10 @@ public class SmoothStreamingChunkSource implements ChunkSource,
} }
evaluation.queueSize = queue.size(); evaluation.queueSize = queue.size();
if (enabledTrack.isAdaptive()) { if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats, adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledFormats, evaluation);
evaluation);
} else { } else {
evaluation.format = enabledTrack.fixedFormat; evaluation.format = enabledFormats[0];
evaluation.trigger = Chunk.TRIGGER_MANUAL; evaluation.trigger = Chunk.TRIGGER_MANUAL;
} }
...@@ -268,7 +279,7 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -268,7 +279,7 @@ public class SmoothStreamingChunkSource implements ChunkSource,
// In all cases where we return before instantiating a new chunk, we want out.chunk to be null. // In all cases where we return before instantiating a new chunk, we want out.chunk to be null.
out.chunk = null; out.chunk = null;
StreamElement streamElement = currentManifest.streamElements[enabledTrack.elementIndex]; StreamElement streamElement = currentManifest.streamElements[elementIndex];
if (streamElement.chunkCount == 0) { if (streamElement.chunkCount == 0) {
if (currentManifest.isLive) { if (currentManifest.isLive) {
needManifestRefresh = true; needManifestRefresh = true;
...@@ -315,12 +326,12 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -315,12 +326,12 @@ public class SmoothStreamingChunkSource implements ChunkSource,
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset; int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
int manifestTrackIndex = getManifestTrackIndex(streamElement, selectedFormat); int manifestTrackIndex = getManifestTrackIndex(streamElement, selectedFormat);
int manifestTrackKey = getManifestTrackKey(enabledTrack.elementIndex, manifestTrackIndex); int manifestTrackKey = getManifestTrackKey(elementIndex, manifestTrackIndex);
Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex); Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null, Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractorWrappers.get(manifestTrackKey), drmInitData, dataSource, currentAbsoluteChunkIndex, extractorWrappers.get(manifestTrackKey), drmInitData, dataSource, currentAbsoluteChunkIndex,
chunkStartTimeUs, chunkEndTimeUs, evaluation.trigger, mediaFormats.get(manifestTrackKey), chunkStartTimeUs, chunkEndTimeUs, evaluation.trigger, mediaFormats.get(manifestTrackKey),
enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight); adaptiveMaxWidth, adaptiveMaxHeight);
out.chunk = mediaChunk; out.chunk = mediaChunk;
} }
...@@ -336,7 +347,7 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -336,7 +347,7 @@ public class SmoothStreamingChunkSource implements ChunkSource,
@Override @Override
public void disable(List<? extends MediaChunk> queue) { public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) { if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.disable(); adaptiveFormatEvaluator.disable();
} }
if (manifestFetcher != null) { if (manifestFetcher != null) {
...@@ -346,43 +357,28 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -346,43 +357,28 @@ public class SmoothStreamingChunkSource implements ChunkSource,
fatalError = null; fatalError = null;
} }
// SmoothStreamingTrackSelector.Output implementation. // Private methods.
@Override private void selectTracks(SmoothStreamingManifest manifest) {
public void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] trackIndices) { for (int i = 0; i < manifest.streamElements.length; i++) {
if (adaptiveFormatEvaluator == null) { if (manifest.streamElements[i].type == streamElementType) {
// Do nothing. // We've found an element of the desired type.
return; elementIndex = i;
} TrackElement[] trackElements = manifest.streamElements[i].tracks;
MediaFormat maxHeightMediaFormat = null; trackFormats = new Format[trackElements.length];
StreamElement streamElement = manifest.streamElements[element]; MediaFormat[] trackMediaFormats = new MediaFormat[trackElements.length];
int maxWidth = -1; for (int j = 0; j < trackMediaFormats.length; j++) {
int maxHeight = -1; trackFormats[j] = trackElements[j].format;
Format[] formats = new Format[trackIndices.length]; trackMediaFormats[j] = initManifestTrack(manifest, i, j);
for (int i = 0; i < formats.length; i++) { }
int manifestTrackIndex = trackIndices[i]; trackGroup = new TrackGroup(adaptiveFormatEvaluator != null, trackMediaFormats);
formats[i] = streamElement.tracks[manifestTrackIndex].format; return;
MediaFormat mediaFormat = initManifestTrack(manifest, element, manifestTrackIndex);
if (maxHeightMediaFormat == null || mediaFormat.height > maxHeight) {
maxHeightMediaFormat = mediaFormat;
} }
maxWidth = Math.max(maxWidth, mediaFormat.width);
maxHeight = Math.max(maxHeight, mediaFormat.height);
} }
Arrays.sort(formats, new DecreasingBandwidthComparator()); trackGroup = new TrackGroup(adaptiveFormatEvaluator != null);
MediaFormat adaptiveMediaFormat = maxHeightMediaFormat.copyAsAdaptive(null); trackFormats = new Format[0];
tracks.add(new ExposedTrack(adaptiveMediaFormat, element, formats, maxWidth, maxHeight));
} }
@Override
public void fixedTrack(SmoothStreamingManifest manifest, int element, int trackIndex) {
MediaFormat mediaFormat = initManifestTrack(manifest, element, trackIndex);
Format format = manifest.streamElements[element].tracks[trackIndex].format;
tracks.add(new ExposedTrack(mediaFormat, element, format));
}
// Private methods.
private MediaFormat initManifestTrack(SmoothStreamingManifest manifest, int elementIndex, private MediaFormat initManifestTrack(SmoothStreamingManifest manifest, int elementIndex,
int trackIndex) { int trackIndex) {
int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex); int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex);
...@@ -515,45 +511,4 @@ public class SmoothStreamingChunkSource implements ChunkSource, ...@@ -515,45 +511,4 @@ public class SmoothStreamingChunkSource implements ChunkSource,
data[secondPosition] = temp; data[secondPosition] = temp;
} }
// Private classes.
private static final class ExposedTrack {
public final MediaFormat trackFormat;
private final int elementIndex;
// Non-adaptive track variables.
private final Format fixedFormat;
// Adaptive track variables.
private final Format[] adaptiveFormats;
private final int adaptiveMaxWidth;
private final int adaptiveMaxHeight;
public ExposedTrack(MediaFormat trackFormat, int elementIndex, Format fixedFormat) {
this.trackFormat = trackFormat;
this.elementIndex = elementIndex;
this.fixedFormat = fixedFormat;
this.adaptiveFormats = null;
this.adaptiveMaxWidth = MediaFormat.NO_VALUE;
this.adaptiveMaxHeight = MediaFormat.NO_VALUE;
}
public ExposedTrack(MediaFormat trackFormat, int elementIndex, Format[] adaptiveFormats,
int adaptiveMaxWidth, int adaptiveMaxHeight) {
this.trackFormat = trackFormat;
this.elementIndex = elementIndex;
this.adaptiveFormats = adaptiveFormats;
this.adaptiveMaxWidth = adaptiveMaxWidth;
this.adaptiveMaxHeight = adaptiveMaxHeight;
this.fixedFormat = null;
}
public boolean isAdaptive() {
return adaptiveFormats != null;
}
}
} }
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
* Specifies a track selection from a {@link SmoothStreamingManifest}.
*/
public interface SmoothStreamingTrackSelector {
/**
* Defines a selector output.
*/
interface Output {
/**
* Outputs an adaptive track, covering the specified tracks in the specified element.
*
* @param manifest The manifest being processed.
* @param element The index of the element within which the adaptive tracks are located.
* @param tracks The indices of the tracks within the element.
*/
void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);
/**
* Outputs a fixed track corresponding to the specified track in the specified element.
*
* @param manifest The manifest being processed.
* @param element The index of the element within which the track is located.
* @param track The index of the track within the element.
*/
void fixedTrack(SmoothStreamingManifest manifest, int element, int track);
}
/**
* Outputs a track selection for a given manifest.
*
* @param manifest The manifest to process.
* @param output The output to receive tracks.
* @throws IOException If an error occurs processing the manifest.
*/
void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment