Commit be471489 by olly Committed by Oliver Woodman

ExoPlayer V2 Refactor - Step 4

Notes:
1. The logic in ExoPlayerImplInternal is very temporary, until we
   have proper TrackSelector implementations. Ignore the fact that
   it's crazy and has loads of nesting.
2. This change removes all capabilities checking. TrackRenderer
   implementations will be updated to perform these checks in a
   subsequent CL.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=113151233
parent 6cb20525
Showing with 256 additions and 695 deletions
...@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource; ...@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource; import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.DefaultDashTrackSelector; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.UtcTimingElement; import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
...@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder { ...@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the video renderer. // Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_VIDEO,
DefaultDashTrackSelector.newVideoInstance(context, true, false),
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO); elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
...@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder { ...@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the audio renderer. // Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_AUDIO,
DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS, audioDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO); DemoPlayer.TYPE_AUDIO);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO); DemoPlayer.TYPE_AUDIO);
// Build the text renderer. // Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_TEXT,
DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS, textDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT); DemoPlayer.TYPE_TEXT);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl, ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT); DemoPlayer.TYPE_TEXT);
......
...@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player; ...@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl; import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl; import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder; import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.hls.DefaultHlsTrackSelector;
import com.google.android.exoplayer.hls.HlsChunkSource; import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsPlaylist; import com.google.android.exoplayer.hls.HlsPlaylist;
import com.google.android.exoplayer.hls.HlsPlaylistParser; import com.google.android.exoplayer.hls.HlsPlaylistParser;
...@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder { ...@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder {
// Build the video/audio/metadata renderers. // Build the video/audio/metadata renderers.
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(true /* isMaster */, dataSource, url, HlsChunkSource chunkSource = new HlsChunkSource(HlsChunkSource.TYPE_DEFAULT, dataSource, url,
manifest, DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter, manifest, bandwidthMeter, timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl, HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO); MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
......
...@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource; ...@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder; import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.drm.MediaDrmCallback; import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.smoothstreaming.DefaultSmoothStreamingTrackSelector;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser; import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
...@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder { ...@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the video renderer. // Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false), SmoothStreamingManifest.StreamElement.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS); videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
...@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder { ...@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the audio renderer. // Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newAudioInstance(), SmoothStreamingManifest.StreamElement.TYPE_AUDIO, audioDataSource, null,
audioDataSource, null, LIVE_EDGE_LATENCY_MS); LIVE_EDGE_LATENCY_MS);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
DemoPlayer.TYPE_AUDIO);
// Build the text renderer. // Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher, ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newTextInstance(), SmoothStreamingManifest.StreamElement.TYPE_TEXT, textDataSource, null,
textDataSource, null, LIVE_EDGE_LATENCY_MS); LIVE_EDGE_LATENCY_MS);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl, ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
DemoPlayer.TYPE_TEXT);
// Invoke the callback. // Invoke the callback.
player.onSource( player.onSource(
......
...@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testGetAvailableRangeOnVod() { public void testGetAvailableRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, null,
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null); null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000); checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000);
...@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
public void testGetAvailableRangeOnMultiPeriodVod() { public void testGetAvailableRangeOnMultiPeriodVod() {
DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null); AdaptationSet.TYPE_VIDEO, null, null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000); checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000);
} }
...@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testSegmentIndexInitializationOnVod() { public void testSegmentIndexInitializationOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), mock(DataSource.class), null);
null);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
List<MediaChunk> queue = new ArrayList<>(); List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder(); ChunkOperationHolder out = new ChunkOperationHolder();
...@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class); ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
when(manifestFetcher.getManifest()).thenReturn(mpd); when(manifestFetcher.getManifest()).thenReturn(mpd);
DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd, DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), null, AdaptationSet.TYPE_VIDEO, mock(DataSource.class), null,
new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS), new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS),
liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null, liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null,
0); 0);
chunkSource.prepare(); chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(new int[] {0});
return chunkSource; return chunkSource;
} }
......
...@@ -13,4 +13,4 @@ ...@@ -13,4 +13,4 @@
# Project target. # Project target.
target=android-23 target=android-23
android.library=false android.library=false
android.library.reference.1=../experimental android.library.reference.1=../main
...@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private final long minBufferUs; private final long minBufferUs;
private final long minRebufferUs; private final long minRebufferUs;
private final List<TrackRenderer> enabledRenderers; private final List<TrackRenderer> enabledRenderers;
private final int[][] trackIndices; private final int[][] groupIndices;
private final int[][][] trackIndices;
private final int[] selectedTrackIndices; private final int[] selectedTrackIndices;
private final Handler handler; private final Handler handler;
private final HandlerThread internalPlaybackThread; private final HandlerThread internalPlaybackThread;
...@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger;
standaloneMediaClock = new StandaloneMediaClock(); standaloneMediaClock = new StandaloneMediaClock();
pendingSeekCount = new AtomicInteger(); pendingSeekCount = new AtomicInteger();
enabledRenderers = new ArrayList<>(renderers.length); enabledRenderers = new ArrayList<>(renderers.length);
trackIndices = new int[renderers.length][]; groupIndices = new int[renderers.length][];
trackIndices = new int[renderers.length][][];
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can // Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect. // not normally change to this priority" is incorrect.
internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler", internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler",
...@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean allRenderersEnded = true; boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true; boolean allRenderersReadyOrEnded = true;
// Establish the mapping from renderer to track index (trackIndices), and build a list of // The maximum number of tracks that one renderer can support is the total number of tracks in
// formats corresponding to each renderer (trackFormats). // all groups, plus possibly one adaptive track per group.
int trackCount = source.getTrackCount(); int maxTrackCount = source.getTrackGroupCount();
boolean[] trackMappedFlags = new boolean[trackCount]; for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
maxTrackCount += source.getTrackGroup(groupIndex).length;
}
// Construct tracks for each renderer.
MediaFormat[][] trackFormats = new MediaFormat[renderers.length][]; MediaFormat[][] trackFormats = new MediaFormat[renderers.length][];
for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) { for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
TrackRenderer renderer = renderers[rendererIndex]; TrackRenderer renderer = renderers[rendererIndex];
int rendererTrackCount = 0; int rendererTrackCount = 0;
int[] rendererTrackIndices = new int[trackCount]; int[] rendererTrackGroups = new int[maxTrackCount];
MediaFormat[] rendererTrackFormats = new MediaFormat[trackCount]; int[][] rendererTrackIndices = new int[maxTrackCount][];
for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) { MediaFormat[] rendererTrackFormats = new MediaFormat[maxTrackCount];
MediaFormat trackFormat = source.getFormat(trackIndex); for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
if (!trackMappedFlags[trackIndex] && renderer.handlesTrack(trackFormat)) { TrackGroup trackGroup = source.getTrackGroup(groupIndex);
trackMappedFlags[trackIndex] = true; // TODO[REFACTOR]: This should check that the renderer is capable of adaptive playback, in
rendererTrackIndices[rendererTrackCount] = trackIndex; // addition to checking that the group is adaptive.
rendererTrackFormats[rendererTrackCount++] = trackFormat; if (trackGroup.adaptive) {
// Try and build an adaptive track.
int adaptiveTrackIndexCount = 0;
int[] adaptiveTrackIndices = new int[trackGroup.length];
MediaFormat adaptiveTrackFormat = null;
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
adaptiveTrackIndices[adaptiveTrackIndexCount++] = trackIndex;
if (adaptiveTrackFormat == null) {
adaptiveTrackFormat = trackFormat.copyAsAdaptive("auto");
}
}
}
if (adaptiveTrackIndexCount > 1) {
// We succeeded in building an adaptive track.
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] =
Arrays.copyOf(adaptiveTrackIndices, adaptiveTrackIndexCount);
rendererTrackFormats[rendererTrackCount++] = adaptiveTrackFormat;
}
}
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] = new int[] {trackIndex};
rendererTrackFormats[rendererTrackCount++] = trackFormat;
}
} }
} }
groupIndices[rendererIndex] = Arrays.copyOf(rendererTrackGroups, rendererTrackCount);
trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount); trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount);
trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount); trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount);
} }
...@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger;
TrackRenderer renderer = renderers[rendererIndex]; TrackRenderer renderer = renderers[rendererIndex];
int trackIndex = selectedTrackIndices[rendererIndex]; int trackIndex = selectedTrackIndices[rendererIndex];
if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) { if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) {
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex]; TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs); trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, false); renderer.enable(trackStream, positionUs, false);
enabledRenderers.add(renderer); enabledRenderers.add(renderer);
allRenderersEnded = allRenderersEnded && renderer.isEnded(); allRenderersEnded = allRenderersEnded && renderer.isEnded();
...@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean playing = playWhenReady && state == ExoPlayer.STATE_READY; boolean playing = playWhenReady && state == ExoPlayer.STATE_READY;
// Consider as joining if the renderer was previously disabled, but not when switching tracks. // Consider as joining if the renderer was previously disabled, but not when switching tracks.
boolean joining = !isEnabled && playing; boolean joining = !isEnabled && playing;
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex]; TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs); trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, joining); renderer.enable(trackStream, positionUs, joining);
enabledRenderers.add(renderer); enabledRenderers.add(renderer);
if (playing) { if (playing) {
......
...@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource {
private final long fileDescriptorOffset; private final long fileDescriptorOffset;
private final long fileDescriptorLength; private final long fileDescriptorLength;
private MediaExtractor extractor;
private MediaFormat[] trackFormats;
private boolean prepared; private boolean prepared;
private long durationUs; private long durationUs;
private int enabledTrackCount; private MediaExtractor extractor;
private TrackGroup[] tracks;
private int[] trackStates; private int[] trackStates;
private boolean[] pendingResets; private boolean[] pendingResets;
private int enabledTrackCount;
private long lastSeekPositionUs; private long lastSeekPositionUs;
private long pendingSeekPositionUs; private long pendingSeekPositionUs;
...@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource {
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
trackStates = new int[extractor.getTrackCount()]; trackStates = new int[extractor.getTrackCount()];
pendingResets = new boolean[trackStates.length]; pendingResets = new boolean[trackStates.length];
trackFormats = new MediaFormat[trackStates.length]; tracks = new TrackGroup[trackStates.length];
for (int i = 0; i < trackStates.length; i++) { for (int i = 0; i < trackStates.length; i++) {
trackFormats[i] = createMediaFormat(extractor.getTrackFormat(i)); MediaFormat format = createMediaFormat(extractor.getTrackFormat(i));
long trackDurationUs = trackFormats[i].durationUs; tracks[i] = new TrackGroup(format);
long trackDurationUs = format.durationUs;
if (trackDurationUs > durationUs) { if (trackDurationUs > durationUs) {
durationUs = trackDurationUs; durationUs = trackDurationUs;
} }
...@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource {
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
Assertions.checkState(prepared); return tracks.length;
return trackStates.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared); return tracks[group];
return trackFormats[track];
} }
@Override @Override
...@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource {
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] track, long positionUs) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED); Assertions.checkState(trackStates[group] == TRACK_STATE_DISABLED);
enabledTrackCount++; enabledTrackCount++;
trackStates[track] = TRACK_STATE_ENABLED; trackStates[group] = TRACK_STATE_ENABLED;
extractor.selectTrack(track); extractor.selectTrack(group);
seekToUsInternal(positionUs, positionUs != 0); seekToUsInternal(positionUs, positionUs != 0);
return new TrackStreamImpl(track); return new TrackStreamImpl(group);
} }
/* package */ long readReset(int track) { /* package */ long readReset(int track) {
...@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource { ...@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource {
return TrackStream.NOTHING_READ; return TrackStream.NOTHING_READ;
} }
if (trackStates[track] != TRACK_STATE_FORMAT_SENT) { if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
formatHolder.format = trackFormats[track]; formatHolder.format = tracks[track].getFormat(0);
formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null; formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
trackStates[track] = TRACK_STATE_FORMAT_SENT; trackStates[track] = TRACK_STATE_FORMAT_SENT;
return TrackStream.FORMAT_READ; return TrackStream.FORMAT_READ;
......
...@@ -28,7 +28,6 @@ import android.text.TextUtils; ...@@ -28,7 +28,6 @@ import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import android.util.Pair; import android.util.Pair;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
/** /**
...@@ -43,8 +42,7 @@ public final class MediaCodecUtil { ...@@ -43,8 +42,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the * Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart). * mediaserver process has crashed and is yet to restart).
*/ */
// TODO[REFACTOR]: Shouldn't implement IOException. public static class DecoderQueryException extends Exception {
public static class DecoderQueryException extends IOException {
private DecoderQueryException(Throwable cause) { private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause); super("Failed to query underlying media codecs", cause);
......
...@@ -15,6 +15,8 @@ ...@@ -15,6 +15,8 @@
*/ */
package com.google.android.exoplayer; package com.google.android.exoplayer;
import android.util.Pair;
import java.io.IOException; import java.io.IOException;
/** /**
...@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource { ...@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource {
private boolean prepared; private boolean prepared;
private long durationUs; private long durationUs;
private SampleSource[] trackSources; private TrackGroup[] tracks;
private int[] trackIndices;
public MultiSampleSource(SampleSource... sources) { public MultiSampleSource(SampleSource... sources) {
this.sources = sources; this.sources = sources;
...@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource { ...@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource {
if (prepared) { if (prepared) {
this.prepared = true; this.prepared = true;
this.durationUs = C.UNKNOWN_TIME_US; this.durationUs = C.UNKNOWN_TIME_US;
int trackCount = 0; int totalTrackGroupCount = 0;
for (int i = 0; i < sources.length; i++) { for (int i = 0; i < sources.length; i++) {
trackCount += sources[i].getTrackCount(); totalTrackGroupCount += sources[i].getTrackGroupCount();
if (sources[i].getDurationUs() > durationUs) { if (sources[i].getDurationUs() > durationUs) {
durationUs = sources[i].getDurationUs(); durationUs = sources[i].getDurationUs();
} }
} }
trackSources = new SampleSource[trackCount]; tracks = new TrackGroup[totalTrackGroupCount];
trackIndices = new int[trackCount]; int trackGroupIndex = 0;
int index = 0;
for (int i = 0; i < sources.length; i++) { for (int i = 0; i < sources.length; i++) {
int thisSourceTrackCount = sources[i].getTrackCount(); int sourceTrackGroupCount = sources[i].getTrackGroupCount();
for (int j = 0; j < thisSourceTrackCount; j++) { for (int j = 0; j < sourceTrackGroupCount; j++) {
trackSources[index] = sources[i]; tracks[trackGroupIndex++] = sources[i].getTrackGroup(j);
trackIndices[index++] = j;
} }
} }
} }
...@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource { ...@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource {
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return trackSources.length; return tracks.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
return trackSources[track].getFormat(trackIndices[track]); return tracks[group];
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
return trackSources[track].enable(trackIndices[track], positionUs); Pair<Integer, Integer> sourceAndGroup = getSourceAndTrackGroupIndices(group);
return sources[sourceAndGroup.first].enable(sourceAndGroup.second, tracks, positionUs);
} }
@Override @Override
...@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource { ...@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource {
prepared = false; prepared = false;
} }
private Pair<Integer, Integer> getSourceAndTrackGroupIndices(int group) {
int totalTrackGroupCount = 0;
for (int i = 0; i < sources.length; i++) {
int sourceTrackGroupCount = sources[i].getTrackGroupCount();
if (group < totalTrackGroupCount + sourceTrackGroupCount) {
return Pair.create(i, group - totalTrackGroupCount);
}
totalTrackGroupCount += sourceTrackGroupCount;
}
throw new IndexOutOfBoundsException();
}
} }
...@@ -19,10 +19,6 @@ import java.io.IOException; ...@@ -19,10 +19,6 @@ import java.io.IOException;
/** /**
* A source of media. * A source of media.
* <p>
* A {@link SampleSource} may expose one or multiple tracks. The number of tracks and each track's
* media format can be queried using {@link #getTrackCount()} and {@link #getFormat(int)}
* respectively.
*/ */
public interface SampleSource { public interface SampleSource {
...@@ -56,30 +52,23 @@ public interface SampleSource { ...@@ -56,30 +52,23 @@ public interface SampleSource {
long getDurationUs(); long getDurationUs();
/** /**
* Returns the number of tracks exposed by the source. * Returns the number of track groups exposed by the source.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @return The number of tracks. * @return The number of track groups exposed by the source.
*/ */
int getTrackCount(); public int getTrackGroupCount();
/** /**
* Returns the format of the specified track. * Returns the {@link TrackGroup} at the specified index.
* <p>
* Note that whilst the format of a track will remain constant, the format of the actual media
* stream may change dynamically. An example of this is where the track is adaptive (i.e.
* {@link MediaFormat#adaptive} is true). Hence the track formats returned through this method
* should not be used to configure decoders. Decoder configuration should be performed using the
* formats obtained when reading the media stream through calls to
* {@link TrackStream#readData(MediaFormatHolder, SampleHolder)}.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @param track The track index. * @param group The group index.
* @return The format of the specified track. * @return The corresponding {@link TrackGroup}.
*/ */
MediaFormat getFormat(int track); public TrackGroup getTrackGroup(int group);
/** /**
* Indicates to the source that it should continue buffering data for its enabled tracks. * Indicates to the source that it should continue buffering data for its enabled tracks.
...@@ -112,17 +101,19 @@ public interface SampleSource { ...@@ -112,17 +101,19 @@ public interface SampleSource {
void seekToUs(long positionUs); void seekToUs(long positionUs);
/** /**
* Enables the specified track. Returning a {@link TrackStream} from which the track's data can * Enables the specified group to read the specified tracks. A {@link TrackStream} is returned
* be read. * through which the enabled track's data can be read.
* <p> * <p>
* This method should only be called after the source has been prepared, and when the specified * This method should only be called after the source has been prepared, and when the specified
* track is disabled. * group is disabled. Note that {@code tracks.length} is only permitted to be greater than one
* if {@link TrackGroup#adaptive} is true for the group.
* *
* @param track The track to enable. * @param group The group index.
* @param tracks The track indices.
* @param positionUs The current playback position in microseconds. * @param positionUs The current playback position in microseconds.
* @return A {@link TrackStream} from which the enabled track's data can be read. * @return A {@link TrackStream} from which the enabled track's data can be read.
*/ */
TrackStream enable(int track, long positionUs); public TrackStream enable(int group, int[] tracks, long positionUs);
/** /**
* Releases the source. * Releases the source.
......
...@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
private final DataSource dataSource; private final DataSource dataSource;
private final MediaFormat format; private final MediaFormat format;
private final int minLoadableRetryCount; private final int minLoadableRetryCount;
private final TrackGroup tracks;
private int state; private int state;
private byte[] sampleData; private byte[] sampleData;
...@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
this.dataSource = dataSource; this.dataSource = dataSource;
this.format = format; this.format = format;
this.minLoadableRetryCount = minLoadableRetryCount; this.minLoadableRetryCount = minLoadableRetryCount;
tracks = new TrackGroup(format);
sampleData = new byte[INITIAL_SAMPLE_SIZE]; sampleData = new byte[INITIAL_SAMPLE_SIZE];
} }
...@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load ...@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return 1; return 1;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
return format; return tracks;
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
state = STATE_SEND_FORMAT; state = STATE_SEND_FORMAT;
clearCurrentLoadableException(); clearCurrentLoadableException();
maybeStartLoading(); maybeStartLoading();
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.SampleSource.TrackStream;
/**
 * Defines a group of tracks exposed by a {@link SampleSource}.
 * <p>
 * A {@link SampleSource} is only able to provide one {@link TrackStream} corresponding to a group
 * at any given time. If {@link #adaptive} is true this {@link TrackStream} can adapt between
 * multiple tracks within the group. If {@link #adaptive} is false then it's only possible to
 * consume one track from the group at a given time.
 */
public final class TrackGroup {

  /**
   * The number of tracks in the group.
   */
  public final int length;

  /**
   * Whether it's possible to adapt between multiple tracks in the group.
   */
  public final boolean adaptive;

  // Formats of the tracks in the group, indexed by track index.
  private final MediaFormat[] formats;

  /**
   * Constructs a group consisting of a single, non-adaptive track.
   *
   * @param format The format of the single track.
   */
  public TrackGroup(MediaFormat format) {
    this(false, format);
  }

  /**
   * Constructs a group consisting of the given tracks.
   *
   * @param supportsAdaptive Whether it's possible to adapt between multiple tracks in the group.
   * @param formats The track formats.
   */
  public TrackGroup(boolean supportsAdaptive, MediaFormat... formats) {
    this.formats = formats;
    this.adaptive = supportsAdaptive;
    this.length = formats.length;
  }

  /**
   * Returns the format of the track at a given index.
   *
   * @param index The index of the track.
   * @return The track's format.
   */
  public MediaFormat getFormat(int index) {
    return formats[index];
  }

}
...@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder; ...@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource; import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSource.TrackStream; import com.google.android.exoplayer.SampleSource.TrackStream;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.extractor.DefaultTrackOutput; import com.google.android.exoplayer.extractor.DefaultTrackOutput;
import com.google.android.exoplayer.upstream.Loader; import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable; import com.google.android.exoplayer.upstream.Loader.Loadable;
...@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call ...@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
return false; return false;
} }
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
if (chunkSource.getTrackCount() > 0) { TrackGroup trackGroup = chunkSource.getTracks();
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType); if (trackGroup.length > 0) {
durationUs = chunkSource.getFormat(0).durationUs; MediaFormat firstTrackFormat = trackGroup.getFormat(0);
loader = new Loader("Loader:" + firstTrackFormat.mimeType);
durationUs = firstTrackFormat.durationUs;
} }
state = STATE_PREPARED; state = STATE_PREPARED;
return true; return true;
...@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call ...@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
Assertions.checkState(state != STATE_IDLE); return 1;
return chunkSource.getTrackCount();
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(state != STATE_IDLE); Assertions.checkState(state != STATE_IDLE);
return chunkSource.getFormat(track); return chunkSource.getTracks();
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(state == STATE_PREPARED); Assertions.checkState(state == STATE_PREPARED);
Assertions.checkState(enabledTrackCount++ == 0); Assertions.checkState(enabledTrackCount++ == 0);
state = STATE_ENABLED; state = STATE_ENABLED;
chunkSource.enable(track); chunkSource.enable(tracks);
loadControl.register(this, bufferSizeContribution); loadControl.register(this, bufferSizeContribution);
downstreamFormat = null; downstreamFormat = null;
downstreamMediaFormat = null; downstreamMediaFormat = null;
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
*/ */
package com.google.android.exoplayer.chunk; package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.TrackGroup;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
...@@ -48,33 +48,23 @@ public interface ChunkSource { ...@@ -48,33 +48,23 @@ public interface ChunkSource {
boolean prepare(); boolean prepare();
/** /**
* Returns the number of tracks exposed by the source. * Gets the group of tracks provided by the source.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared.
* *
* @return The number of tracks. * @return The track group.
*/ */
int getTrackCount(); TrackGroup getTracks();
/** /**
* Gets the format of the specified track. * Enable the source for the specified tracks.
* <p> * <p>
* This method should only be called after the source has been prepared. * This method should only be called after the source has been prepared and when the source is
*
* @param track The track index.
* @return The format of the track.
*/
MediaFormat getFormat(int track);
/**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* disabled. * disabled.
* *
* @param track The track index. * @param tracks The track indices.
*/ */
void enable(int track); void enable(int[] tracks);
/** /**
* Indicates to the source that it should still be checking for updates to the stream. * Indicates to the source that it should still be checking for updates to the stream.
......
...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk; ...@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.C; import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec; import com.google.android.exoplayer.upstream.DataSpec;
...@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private final DataSpec dataSpec; private final DataSpec dataSpec;
private final Format format; private final Format format;
private final long durationUs; private final long durationUs;
private final MediaFormat mediaFormat; private final TrackGroup tracks;
/** /**
* @param dataSource A {@link DataSource} suitable for loading the sample data. * @param dataSource A {@link DataSource} suitable for loading the sample data.
...@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
this.dataSpec = dataSpec; this.dataSpec = dataSpec;
this.format = format; this.format = format;
this.durationUs = durationUs; this.durationUs = durationUs;
this.mediaFormat = mediaFormat; tracks = new TrackGroup(mediaFormat);
} }
@Override @Override
...@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource {
} }
@Override @Override
public int getTrackCount() { public TrackGroup getTracks() {
return 1; return tracks;
} }
@Override @Override
public MediaFormat getFormat(int track) { public void enable(int[] tracks) {
return mediaFormat;
}
@Override
public void enable(int track) {
// Do nothing. // Do nothing.
} }
...@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource { ...@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private SingleSampleMediaChunk initChunk() { private SingleSampleMediaChunk initChunk() {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0, return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0,
durationUs, 0, mediaFormat, null, Chunk.NO_PARENT_ID); durationUs, 0, tracks.getFormat(0), null, Chunk.NO_PARENT_ID);
} }
} }
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link Period} of a media presentation description.
 */
public interface DashTrackSelector {

  /**
   * Defines a selector output.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified representations in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the representations
     *     are located.
     * @param representationIndices The indices of the representations within the adaptation set.
     */
    void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
        int adaptationSetIndex, int[] representationIndices);

    /**
     * Outputs a fixed track corresponding to the specified representation in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the track is located.
     * @param representationIndex The index of the representation within the adaptation set.
     */
    void fixedTrack(MediaPresentationDescription manifest, int periodIndex, int adaptationSetIndex,
        int representationIndex);

  }

  /**
   * Outputs a track selection for a given period.
   *
   * @param manifest The media presentation description to process.
   * @param periodIndex The index of the period to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the period.
   */
  void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
/**
 * A default {@link DashTrackSelector} implementation.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultDashTrackSelector implements DashTrackSelector {

  // The AdaptationSet.TYPE_* constant this selector processes.
  private final int adaptationSetType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video adaptation sets.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newAudioInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text adaptation sets.
   *
   * @return The selector instance.
   */
  public static DefaultDashTrackSelector newTextInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
  }

  private DefaultDashTrackSelector(int adaptationSetType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.adaptationSetType = adaptationSetType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException {
    Period period = manifest.getPeriod(periodIndex);
    for (int setIndex = 0; setIndex < period.adaptationSets.size(); setIndex++) {
      AdaptationSet adaptationSet = period.adaptationSets.get(setIndex);
      if (adaptationSet.type != adaptationSetType) {
        // Not the media type this selector handles.
        continue;
      }
      if (adaptationSetType != AdaptationSet.TYPE_VIDEO) {
        // Audio/text: every representation is output as a fixed track.
        for (int j = 0; j < adaptationSet.representations.size(); j++) {
          output.fixedTrack(manifest, periodIndex, setIndex, j);
        }
        continue;
      }
      // Video: optionally filter to the representations playable on this device.
      int[] representationIndices = filterVideoRepresentations
          ? VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
              adaptationSet.representations, null,
              filterProtectedHdContent && adaptationSet.hasContentProtection())
          : Util.firstIntegersArray(adaptationSet.representations.size());
      if (representationIndices.length > 1) {
        // More than one playable representation: also output an adaptive track covering them all.
        output.adaptiveTrack(manifest, periodIndex, setIndex, representationIndices);
      }
      for (int representationIndex : representationIndices) {
        output.fixedTrack(manifest, periodIndex, setIndex, representationIndex);
      }
    }
  }

}
...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder; ...@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.ParserException; import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder; import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource; import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.drm.DrmInitData; import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.Allocator; import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
...@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
private boolean prepared; private boolean prepared;
private int enabledTrackCount; private int enabledTrackCount;
private MediaFormat[] mediaFormats; private TrackGroup[] tracks;
private long durationUs; private long durationUs;
private boolean[] pendingMediaFormat; private boolean[] pendingMediaFormat;
private boolean[] pendingResets; private boolean[] pendingResets;
...@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) { if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) {
int trackCount = sampleQueues.size(); int trackCount = sampleQueues.size();
tracks = new TrackGroup[trackCount];
trackEnabledStates = new boolean[trackCount]; trackEnabledStates = new boolean[trackCount];
pendingResets = new boolean[trackCount]; pendingResets = new boolean[trackCount];
pendingMediaFormat = new boolean[trackCount]; pendingMediaFormat = new boolean[trackCount];
mediaFormats = new MediaFormat[trackCount];
durationUs = C.UNKNOWN_TIME_US; durationUs = C.UNKNOWN_TIME_US;
for (int i = 0; i < trackCount; i++) { for (int i = 0; i < trackCount; i++) {
MediaFormat format = sampleQueues.valueAt(i).getFormat(); MediaFormat format = sampleQueues.valueAt(i).getFormat();
mediaFormats[i] = format; tracks[i] = new TrackGroup(format);
if (format.durationUs > durationUs) { if (format.durationUs > durationUs) {
durationUs = format.durationUs; durationUs = format.durationUs;
} }
...@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
} }
@Override @Override
public int getTrackCount() { public int getTrackGroupCount() {
return sampleQueues.size(); return tracks.length;
} }
@Override @Override
public MediaFormat getFormat(int track) { public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared); return tracks[group];
return mediaFormats[track];
} }
@Override @Override
public TrackStream enable(int track, long positionUs) { public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared); Assertions.checkState(prepared);
Assertions.checkState(!trackEnabledStates[track]); Assertions.checkState(!trackEnabledStates[group]);
enabledTrackCount++; enabledTrackCount++;
trackEnabledStates[track] = true; trackEnabledStates[group] = true;
pendingMediaFormat[track] = true; pendingMediaFormat[group] = true;
pendingResets[track] = false; pendingResets[group] = false;
if (enabledTrackCount == 1) { if (enabledTrackCount == 1) {
// Treat all enables in non-seekable media as being from t=0. // Treat all enables in non-seekable media as being from t=0.
positionUs = !seekMap.isSeekable() ? 0 : positionUs; positionUs = !seekMap.isSeekable() ? 0 : positionUs;
...@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu ...@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
lastSeekPositionUs = positionUs; lastSeekPositionUs = positionUs;
restartFrom(positionUs); restartFrom(positionUs);
} }
return new TrackStreamImpl(track); return new TrackStreamImpl(group);
} }
/* package */ void disable(int track) { /* package */ void disable(int track) {
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import android.content.Context;
import android.text.TextUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * A default {@link HlsTrackSelector} implementation.
 */
public final class DefaultHlsTrackSelector implements HlsTrackSelector {

  private static final int TYPE_DEFAULT = 0;
  private static final int TYPE_VTT = 1;

  // Splits a codecs attribute on commas and strips trailing whitespace. Compiled once rather than
  // re-compiled on every call to variantHasExplicitCodecWithPrefix.
  private static final Pattern CODECS_SPLIT_PATTERN = Pattern.compile("(\\s*,\\s*)|(\\s*$)");

  private final Context context;
  // One of TYPE_DEFAULT or TYPE_VTT.
  private final int type;

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects the streams defined in the playlist.
   *
   * @param context A context.
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newDefaultInstance(Context context) {
    return new DefaultHlsTrackSelector(context, TYPE_DEFAULT);
  }

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects subtitle renditions.
   *
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newVttInstance() {
    return new DefaultHlsTrackSelector(null, TYPE_VTT);
  }

  private DefaultHlsTrackSelector(Context context, int type) {
    this.context = context;
    this.type = type;
  }

  @Override
  public void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException {
    if (type == TYPE_VTT) {
      // Subtitle selection: output every subtitle rendition as a fixed track.
      List<Variant> subtitleVariants = playlist.subtitles;
      if (subtitleVariants != null && !subtitleVariants.isEmpty()) {
        for (int i = 0; i < subtitleVariants.size(); i++) {
          output.fixedTrack(playlist, subtitleVariants.get(i));
        }
      }
      return;
    }

    // Type is TYPE_DEFAULT. Filter to the variants playable on the default display.
    int[] variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
        context, playlist.variants, null, false);
    ArrayList<Variant> enabledVariantList = new ArrayList<>(variantIndices.length);
    for (int i = 0; i < variantIndices.length; i++) {
      enabledVariantList.add(playlist.variants.get(variantIndices[i]));
    }

    // Classify variants that definitely contain video, or are definitely audio only, based on
    // their declared height and codecs.
    ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
    ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
    for (int i = 0; i < enabledVariantList.size(); i++) {
      Variant variant = enabledVariantList.get(i);
      if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
        definiteVideoVariants.add(variant);
      } else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
        definiteAudioOnlyVariants.add(variant);
      }
    }

    if (!definiteVideoVariants.isEmpty()) {
      // We've identified some variants as definitely containing video. Assume variants within the
      // master playlist are marked consistently, and hence that we have the full set. Filter out
      // any other variants, which are likely to be audio only.
      enabledVariantList = definiteVideoVariants;
    } else if (definiteAudioOnlyVariants.size() < enabledVariantList.size()) {
      // We've identified some variants, but not all, as being audio only. Filter them out to leave
      // the remaining variants, which are likely to contain video.
      enabledVariantList.removeAll(definiteAudioOnlyVariants);
    } else {
      // Leave the enabled variants unchanged. They're likely either all video or all audio.
    }

    if (enabledVariantList.size() > 1) {
      // More than one variant remains: also output an adaptive track covering them all.
      Variant[] enabledVariants = new Variant[enabledVariantList.size()];
      enabledVariantList.toArray(enabledVariants);
      output.adaptiveTrack(playlist, enabledVariants);
    }
    for (int i = 0; i < enabledVariantList.size(); i++) {
      output.fixedTrack(playlist, enabledVariantList.get(i));
    }
  }

  /**
   * Returns whether {@code variant} explicitly declares a codec whose name starts with
   * {@code prefix} in its codecs attribute.
   *
   * @param variant The variant to inspect.
   * @param prefix The codec name prefix to look for (e.g. "avc", "mp4a").
   * @return True if a matching codec is declared. False otherwise, including when no codecs
   *     attribute is present.
   */
  private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
    String codecs = variant.format.codecs;
    if (TextUtils.isEmpty(codecs)) {
      return false;
    }
    for (String codec : CODECS_SPLIT_PATTERN.split(codecs)) {
      if (codec.startsWith(prefix)) {
        return true;
      }
    }
    return false;
  }

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import java.io.IOException;
/**
 * Specifies a track selection from an {@link HlsMasterPlaylist}.
 */
public interface HlsTrackSelector {

  /**
   * Defines a selector output.
   */
  interface Output {

    /**
     * Outputs an adaptive track covering the specified variants of the playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variants The variants to use for the adaptive track.
     */
    void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants);

    /**
     * Outputs a fixed track corresponding to the specified variant of the playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variant The variant to use for the track.
     */
    void fixedTrack(HlsMasterPlaylist playlist, Variant variant);

  }

  /**
   * Outputs a track selection for the given playlist.
   *
   * @param playlist The master playlist to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the playlist.
   */
  void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
import java.util.Arrays;
/**
 * A default {@link SmoothStreamingTrackSelector} implementation, which outputs tracks for a
 * single stream element type (video, audio or text).
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultSmoothStreamingTrackSelector implements SmoothStreamingTrackSelector {

  private final int streamElementType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video stream elements.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio stream elements. No device capability filtering is applied.
   *
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newAudioInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text stream elements. No device capability filtering is applied.
   *
   * @return The selector instance.
   */
  public static DefaultSmoothStreamingTrackSelector newTextInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_TEXT, null, false, false);
  }

  private DefaultSmoothStreamingTrackSelector(int streamElementType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.streamElementType = streamElementType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
    for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
      StreamElement streamElement = manifest.streamElements[elementIndex];
      if (streamElement.type != streamElementType) {
        // Not the element type this selector handles.
        continue;
      }
      if (streamElementType == StreamElement.TYPE_VIDEO) {
        selectVideoTracks(manifest, elementIndex, streamElement.tracks, output);
      } else {
        // Audio and text elements: output every track individually.
        for (int trackIndex = 0; trackIndex < streamElement.tracks.length; trackIndex++) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
      }
    }
  }

  /**
   * Outputs the selected video tracks for a single stream element: an adaptive track when more
   * than one track is selected, plus a fixed track for each selected track.
   */
  private void selectVideoTracks(SmoothStreamingManifest manifest, int elementIndex,
      TrackElement[] tracks, Output output) throws IOException {
    int[] selectedIndices;
    if (filterVideoRepresentations) {
      // Restrict the selection to tracks the device can render, optionally excluding
      // protected HD content.
      selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, Arrays.asList(tracks), null,
          filterProtectedHdContent && manifest.protectionElement != null);
    } else {
      // No filtering: select every track in the element.
      selectedIndices = Util.firstIntegersArray(tracks.length);
    }
    if (selectedIndices.length > 1) {
      output.adaptiveTrack(manifest, elementIndex, selectedIndices);
    }
    for (int selectedIndex : selectedIndices) {
      output.fixedTrack(manifest, elementIndex, selectedIndex);
    }
  }

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link SmoothStreamingManifest}.
 */
public interface SmoothStreamingTrackSelector {

  /**
   * Defines a selector output, receiving the tracks chosen by the selector.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified tracks in the specified stream element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the stream element within which the adaptive tracks are located.
     * @param tracks The indices of the tracks within the stream element.
     */
    void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);

    /**
     * Outputs a fixed track corresponding to the specified track in the specified stream element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the stream element within which the track is located.
     * @param track The index of the track within the stream element.
     */
    void fixedTrack(SmoothStreamingManifest manifest, int element, int track);

  }

  /**
   * Outputs a track selection for a given manifest. The output methods may be invoked multiple
   * times, once per selected track.
   *
   * @param manifest The manifest to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the manifest.
   */
  void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;

}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment