Commit be471489 by olly Committed by Oliver Woodman

ExoPlayer V2 Refactor - Step 4

Notes:
1. The logic in ExoPlayerImplInternal is very temporary, until we
   have proper TrackSelector implementations. Ignore the fact that
   it's crazy and has loads of nesting.
2. This change removes all capabilities checking. TrackRenderer
   implementations will be updated to perform these checks in a
   subsequent CL.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=113151233
parent 6cb20525
Showing with 683 additions and 1243 deletions
......@@ -22,7 +22,7 @@ import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.DefaultDashTrackSelector;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
......@@ -201,8 +201,7 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newVideoInstance(context, true, false),
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
......@@ -211,18 +210,18 @@ public class DashSourceBuilder implements SourceBuilder {
// Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_AUDIO,
audioDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
// Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, AdaptationSet.TYPE_TEXT,
textDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
DemoPlayer.TYPE_TEXT);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
......
......@@ -18,7 +18,6 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.hls.DefaultHlsTrackSelector;
import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsPlaylist;
import com.google.android.exoplayer.hls.HlsPlaylistParser;
......@@ -120,9 +119,8 @@ public class HlsSourceBuilder implements SourceBuilder {
// Build the video/audio/metadata renderers.
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(true /* isMaster */, dataSource, url,
manifest, DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter,
timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsChunkSource chunkSource = new HlsChunkSource(HlsChunkSource.TYPE_DEFAULT, dataSource, url,
manifest, bandwidthMeter, timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
......
......@@ -23,7 +23,6 @@ import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.demo.player.DemoPlayer.SourceBuilder;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.smoothstreaming.DefaultSmoothStreamingTrackSelector;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
......@@ -152,7 +151,7 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the video renderer.
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
SmoothStreamingManifest.StreamElement.TYPE_VIDEO,
videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
......@@ -161,20 +160,18 @@ public class SmoothStreamingSourceBuilder implements SourceBuilder {
// Build the audio renderer.
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newAudioInstance(),
audioDataSource, null, LIVE_EDGE_LATENCY_MS);
SmoothStreamingManifest.StreamElement.TYPE_AUDIO, audioDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
// Build the text renderer.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
DefaultSmoothStreamingTrackSelector.newTextInstance(),
textDataSource, null, LIVE_EDGE_LATENCY_MS);
SmoothStreamingManifest.StreamElement.TYPE_TEXT, textDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
// Invoke the callback.
player.onSource(
......
......@@ -81,10 +81,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
}
public void testGetAvailableRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, null,
null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, VOD_DURATION_MS * 1000);
......@@ -103,9 +103,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
public void testGetAvailableRangeOnMultiPeriodVod() {
DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
AdaptationSet.TYPE_VIDEO, null, null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000);
}
......@@ -118,11 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
}
public void testSegmentIndexInitializationOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class),
null);
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO,
mock(DataSource.class), null);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
......@@ -322,12 +321,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
when(manifestFetcher.getManifest()).thenReturn(mpd);
DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), null,
AdaptationSet.TYPE_VIDEO, mock(DataSource.class), null,
new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS),
liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null,
0);
chunkSource.prepare();
chunkSource.enable(0);
chunkSource.enable(new int[] {0});
return chunkSource;
}
......
......@@ -13,4 +13,4 @@
# Project target.
target=android-23
android.library=false
android.library.reference.1=../experimental
android.library.reference.1=../main
......@@ -75,7 +75,8 @@ import java.util.concurrent.atomic.AtomicInteger;
private final long minBufferUs;
private final long minRebufferUs;
private final List<TrackRenderer> enabledRenderers;
private final int[][] trackIndices;
private final int[][] groupIndices;
private final int[][][] trackIndices;
private final int[] selectedTrackIndices;
private final Handler handler;
private final HandlerThread internalPlaybackThread;
......@@ -125,7 +126,8 @@ import java.util.concurrent.atomic.AtomicInteger;
standaloneMediaClock = new StandaloneMediaClock();
pendingSeekCount = new AtomicInteger();
enabledRenderers = new ArrayList<>(renderers.length);
trackIndices = new int[renderers.length][];
groupIndices = new int[renderers.length][];
trackIndices = new int[renderers.length][][];
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler",
......@@ -301,24 +303,56 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true;
// Establish the mapping from renderer to track index (trackIndices), and build a list of
// formats corresponding to each renderer (trackFormats).
int trackCount = source.getTrackCount();
boolean[] trackMappedFlags = new boolean[trackCount];
// The maximum number of tracks that one renderer can support is the total number of tracks in
// all groups, plus possibly one adaptive track per group.
int maxTrackCount = source.getTrackGroupCount();
for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
maxTrackCount += source.getTrackGroup(groupIndex).length;
}
// Construct tracks for each renderer.
MediaFormat[][] trackFormats = new MediaFormat[renderers.length][];
for (int rendererIndex = 0; rendererIndex < renderers.length; rendererIndex++) {
TrackRenderer renderer = renderers[rendererIndex];
int rendererTrackCount = 0;
int[] rendererTrackIndices = new int[trackCount];
MediaFormat[] rendererTrackFormats = new MediaFormat[trackCount];
for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
MediaFormat trackFormat = source.getFormat(trackIndex);
if (!trackMappedFlags[trackIndex] && renderer.handlesTrack(trackFormat)) {
trackMappedFlags[trackIndex] = true;
rendererTrackIndices[rendererTrackCount] = trackIndex;
rendererTrackFormats[rendererTrackCount++] = trackFormat;
int[] rendererTrackGroups = new int[maxTrackCount];
int[][] rendererTrackIndices = new int[maxTrackCount][];
MediaFormat[] rendererTrackFormats = new MediaFormat[maxTrackCount];
for (int groupIndex = 0; groupIndex < source.getTrackGroupCount(); groupIndex++) {
TrackGroup trackGroup = source.getTrackGroup(groupIndex);
// TODO[REFACTOR]: This should check that the renderer is capable of adaptive playback, in
// addition to checking that the group is adaptive.
if (trackGroup.adaptive) {
// Try and build an adaptive track.
int adaptiveTrackIndexCount = 0;
int[] adaptiveTrackIndices = new int[trackGroup.length];
MediaFormat adaptiveTrackFormat = null;
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
adaptiveTrackIndices[adaptiveTrackIndexCount++] = trackIndex;
if (adaptiveTrackFormat == null) {
adaptiveTrackFormat = trackFormat.copyAsAdaptive("auto");
}
}
}
if (adaptiveTrackIndexCount > 1) {
// We succeeded in building an adaptive track.
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] =
Arrays.copyOf(adaptiveTrackIndices, adaptiveTrackIndexCount);
rendererTrackFormats[rendererTrackCount++] = adaptiveTrackFormat;
}
}
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
MediaFormat trackFormat = source.getTrackGroup(groupIndex).getFormat(trackIndex);
if (renderer.handlesTrack(trackFormat)) {
rendererTrackGroups[rendererTrackCount] = groupIndex;
rendererTrackIndices[rendererTrackCount] = new int[] {trackIndex};
rendererTrackFormats[rendererTrackCount++] = trackFormat;
}
}
}
groupIndices[rendererIndex] = Arrays.copyOf(rendererTrackGroups, rendererTrackCount);
trackIndices[rendererIndex] = Arrays.copyOf(rendererTrackIndices, rendererTrackCount);
trackFormats[rendererIndex] = Arrays.copyOf(rendererTrackFormats, rendererTrackCount);
}
......@@ -328,8 +362,8 @@ import java.util.concurrent.atomic.AtomicInteger;
TrackRenderer renderer = renderers[rendererIndex];
int trackIndex = selectedTrackIndices[rendererIndex];
if (0 <= trackIndex && trackIndex < trackIndices[rendererIndex].length) {
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex];
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs);
TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, false);
enabledRenderers.add(renderer);
allRenderersEnded = allRenderersEnded && renderer.isEnded();
......@@ -606,8 +640,8 @@ import java.util.concurrent.atomic.AtomicInteger;
boolean playing = playWhenReady && state == ExoPlayer.STATE_READY;
// Consider as joining if the renderer was previously disabled, but not when switching tracks.
boolean joining = !isEnabled && playing;
int sourceTrackIndex = trackIndices[rendererIndex][trackIndex];
TrackStream trackStream = source.enable(sourceTrackIndex, positionUs);
TrackStream trackStream = source.enable(groupIndices[rendererIndex][trackIndex],
trackIndices[rendererIndex][trackIndex], positionUs);
renderer.enable(trackStream, positionUs, joining);
enabledRenderers.add(renderer);
if (playing) {
......
......@@ -71,14 +71,14 @@ public final class FrameworkSampleSource implements SampleSource {
private final long fileDescriptorOffset;
private final long fileDescriptorLength;
private MediaExtractor extractor;
private MediaFormat[] trackFormats;
private boolean prepared;
private long durationUs;
private int enabledTrackCount;
private MediaExtractor extractor;
private TrackGroup[] tracks;
private int[] trackStates;
private boolean[] pendingResets;
private int enabledTrackCount;
private long lastSeekPositionUs;
private long pendingSeekPositionUs;
......@@ -132,10 +132,11 @@ public final class FrameworkSampleSource implements SampleSource {
durationUs = C.UNKNOWN_TIME_US;
trackStates = new int[extractor.getTrackCount()];
pendingResets = new boolean[trackStates.length];
trackFormats = new MediaFormat[trackStates.length];
tracks = new TrackGroup[trackStates.length];
for (int i = 0; i < trackStates.length; i++) {
trackFormats[i] = createMediaFormat(extractor.getTrackFormat(i));
long trackDurationUs = trackFormats[i].durationUs;
MediaFormat format = createMediaFormat(extractor.getTrackFormat(i));
tracks[i] = new TrackGroup(format);
long trackDurationUs = format.durationUs;
if (trackDurationUs > durationUs) {
durationUs = trackDurationUs;
}
......@@ -155,15 +156,13 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return trackStates.length;
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return trackFormats[track];
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
......@@ -172,14 +171,14 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] track, long positionUs) {
Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED);
Assertions.checkState(trackStates[group] == TRACK_STATE_DISABLED);
enabledTrackCount++;
trackStates[track] = TRACK_STATE_ENABLED;
extractor.selectTrack(track);
trackStates[group] = TRACK_STATE_ENABLED;
extractor.selectTrack(group);
seekToUsInternal(positionUs, positionUs != 0);
return new TrackStreamImpl(track);
return new TrackStreamImpl(group);
}
/* package */ long readReset(int track) {
......@@ -197,7 +196,7 @@ public final class FrameworkSampleSource implements SampleSource {
return TrackStream.NOTHING_READ;
}
if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
formatHolder.format = trackFormats[track];
formatHolder.format = tracks[track].getFormat(0);
formatHolder.drmInitData = Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
trackStates[track] = TRACK_STATE_FORMAT_SENT;
return TrackStream.FORMAT_READ;
......
......@@ -28,7 +28,6 @@ import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import java.io.IOException;
import java.util.HashMap;
/**
......@@ -43,8 +42,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart).
*/
// TODO[REFACTOR]: Shouldn't implement IOException.
public static class DecoderQueryException extends IOException {
public static class DecoderQueryException extends Exception {
private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause);
......
......@@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer;
import android.util.Pair;
import java.io.IOException;
/**
......@@ -26,8 +28,7 @@ public class MultiSampleSource implements SampleSource {
private boolean prepared;
private long durationUs;
private SampleSource[] trackSources;
private int[] trackIndices;
private TrackGroup[] tracks;
public MultiSampleSource(SampleSource... sources) {
this.sources = sources;
......@@ -45,21 +46,19 @@ public class MultiSampleSource implements SampleSource {
if (prepared) {
this.prepared = true;
this.durationUs = C.UNKNOWN_TIME_US;
int trackCount = 0;
int totalTrackGroupCount = 0;
for (int i = 0; i < sources.length; i++) {
trackCount += sources[i].getTrackCount();
totalTrackGroupCount += sources[i].getTrackGroupCount();
if (sources[i].getDurationUs() > durationUs) {
durationUs = sources[i].getDurationUs();
}
}
trackSources = new SampleSource[trackCount];
trackIndices = new int[trackCount];
int index = 0;
tracks = new TrackGroup[totalTrackGroupCount];
int trackGroupIndex = 0;
for (int i = 0; i < sources.length; i++) {
int thisSourceTrackCount = sources[i].getTrackCount();
for (int j = 0; j < thisSourceTrackCount; j++) {
trackSources[index] = sources[i];
trackIndices[index++] = j;
int sourceTrackGroupCount = sources[i].getTrackGroupCount();
for (int j = 0; j < sourceTrackGroupCount; j++) {
tracks[trackGroupIndex++] = sources[i].getTrackGroup(j);
}
}
}
......@@ -72,18 +71,19 @@ public class MultiSampleSource implements SampleSource {
}
@Override
public int getTrackCount() {
return trackSources.length;
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
return trackSources[track].getFormat(trackIndices[track]);
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
public TrackStream enable(int track, long positionUs) {
return trackSources[track].enable(trackIndices[track], positionUs);
public TrackStream enable(int group, int[] tracks, long positionUs) {
Pair<Integer, Integer> sourceAndGroup = getSourceAndTrackGroupIndices(group);
return sources[sourceAndGroup.first].enable(sourceAndGroup.second, tracks, positionUs);
}
@Override
......@@ -129,4 +129,16 @@ public class MultiSampleSource implements SampleSource {
prepared = false;
}
/**
 * Maps an index into the concatenation of all sources' track groups to the pair
 * (source index, group index within that source).
 *
 * @param group The group index across the concatenation of all sources' groups.
 * @return A pair whose first element is the index of the owning source and whose second element
 *     is the group index within that source.
 * @throws IndexOutOfBoundsException If {@code group} is beyond the total number of groups.
 */
private Pair<Integer, Integer> getSourceAndTrackGroupIndices(int group) {
  // Walk the sources, peeling off each source's group count from the remaining index until the
  // index falls inside the current source.
  int remaining = group;
  for (int sourceIndex = 0; sourceIndex < sources.length; sourceIndex++) {
    int sourceGroupCount = sources[sourceIndex].getTrackGroupCount();
    if (remaining < sourceGroupCount) {
      return Pair.create(sourceIndex, remaining);
    }
    remaining -= sourceGroupCount;
  }
  throw new IndexOutOfBoundsException();
}
}
......@@ -19,10 +19,6 @@ import java.io.IOException;
/**
* A source of media.
* <p>
* A {@link SampleSource} may expose one or multiple tracks. The number of tracks and each track's
* media format can be queried using {@link #getTrackCount()} and {@link #getFormat(int)}
* respectively.
*/
public interface SampleSource {
......@@ -56,30 +52,23 @@ public interface SampleSource {
long getDurationUs();
/**
* Returns the number of tracks exposed by the source.
* Returns the number of track groups exposed by the source.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The number of tracks.
* @return The number of track groups exposed by the source.
*/
int getTrackCount();
public int getTrackGroupCount();
/**
* Returns the format of the specified track.
* <p>
* Note that whilst the format of a track will remain constant, the format of the actual media
* stream may change dynamically. An example of this is where the track is adaptive (i.e.
* {@link MediaFormat#adaptive} is true). Hence the track formats returned through this method
* should not be used to configure decoders. Decoder configuration should be performed using the
* formats obtained when reading the media stream through calls to
* {@link TrackStream#readData(MediaFormatHolder, SampleHolder)}.
* Returns the {@link TrackGroup} at the specified index.
* <p>
* This method should only be called after the source has been prepared.
*
* @param track The track index.
* @return The format of the specified track.
* @param group The group index.
* @return The corresponding {@link TrackGroup}.
*/
MediaFormat getFormat(int track);
public TrackGroup getTrackGroup(int group);
/**
* Indicates to the source that it should continue buffering data for its enabled tracks.
......@@ -112,17 +101,19 @@ public interface SampleSource {
void seekToUs(long positionUs);
/**
* Enables the specified track. Returning a {@link TrackStream} from which the track's data can
* be read.
* Enables the specified group to read the specified tracks. A {@link TrackStream} is returned
* through which the enabled track's data can be read.
* <p>
* This method should only be called after the source has been prepared, and when the specified
* track is disabled.
* group is disabled. Note that {@code tracks.length} is only permitted to be greater than one
* if {@link TrackGroup#adaptive} is true for the group.
*
* @param track The track to enable.
* @param group The group index.
* @param tracks The track indices.
* @param positionUs The current playback position in microseconds.
* @return A {@link TrackStream} from which the enabled track's data can be read.
*/
TrackStream enable(int track, long positionUs);
public TrackStream enable(int group, int[] tracks, long positionUs);
/**
* Releases the source.
......
......@@ -52,6 +52,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
private final DataSource dataSource;
private final MediaFormat format;
private final int minLoadableRetryCount;
private final TrackGroup tracks;
private int state;
private byte[] sampleData;
......@@ -73,6 +74,7 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
this.dataSource = dataSource;
this.format = format;
this.minLoadableRetryCount = minLoadableRetryCount;
tracks = new TrackGroup(format);
sampleData = new byte[INITIAL_SAMPLE_SIZE];
}
......@@ -102,17 +104,17 @@ public final class SingleSampleSource implements SampleSource, TrackStream, Load
}
@Override
public int getTrackCount() {
public int getTrackGroupCount() {
return 1;
}
@Override
public MediaFormat getFormat(int track) {
return format;
public TrackGroup getTrackGroup(int group) {
return tracks;
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
state = STATE_SEND_FORMAT;
clearCurrentLoadableException();
maybeStartLoading();
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.SampleSource.TrackStream;
/**
 * Defines a group of tracks exposed by a {@link SampleSource}.
 * <p>
 * A {@link SampleSource} is only able to provide one {@link TrackStream} corresponding to a group
 * at any given time. If {@link #adaptive} is true this {@link TrackStream} can adapt between
 * multiple tracks within the group. If {@link #adaptive} is false then it's only possible to
 * consume one track from the group at a given time.
 */
public final class TrackGroup {

  /**
   * The number of tracks in the group.
   */
  public final int length;

  /**
   * Whether it's possible to adapt between multiple tracks in the group.
   */
  public final boolean adaptive;

  private final MediaFormat[] formats;

  /**
   * Constructs a group containing a single, non-adaptive track.
   *
   * @param format The format of the single track.
   */
  public TrackGroup(MediaFormat format) {
    this(false, format);
  }

  /**
   * @param supportsAdaptive Whether it's possible to adapt between multiple tracks in the group.
   * @param formats The track formats. Must not be null.
   */
  public TrackGroup(boolean supportsAdaptive, MediaFormat... formats) {
    // Defensively copy the varargs array: the caller retains a reference to it, and without the
    // copy could mutate the group's formats after construction.
    this.formats = formats.clone();
    this.adaptive = supportsAdaptive;
    this.length = this.formats.length;
  }

  /**
   * Gets the format of the track at a given index.
   *
   * @param index The index of the track.
   * @return The track's format.
   */
  public MediaFormat getFormat(int index) {
    return formats[index];
  }

}
......@@ -22,6 +22,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSource.TrackStream;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.extractor.DefaultTrackOutput;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
......@@ -153,9 +154,11 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
return false;
}
durationUs = C.UNKNOWN_TIME_US;
if (chunkSource.getTrackCount() > 0) {
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType);
durationUs = chunkSource.getFormat(0).durationUs;
TrackGroup trackGroup = chunkSource.getTracks();
if (trackGroup.length > 0) {
MediaFormat firstTrackFormat = trackGroup.getFormat(0);
loader = new Loader("Loader:" + firstTrackFormat.mimeType);
durationUs = firstTrackFormat.durationUs;
}
state = STATE_PREPARED;
return true;
......@@ -172,23 +175,22 @@ public class ChunkSampleSource implements SampleSource, TrackStream, Loader.Call
}
@Override
public int getTrackCount() {
Assertions.checkState(state != STATE_IDLE);
return chunkSource.getTrackCount();
public int getTrackGroupCount() {
return 1;
}
@Override
public MediaFormat getFormat(int track) {
public TrackGroup getTrackGroup(int group) {
Assertions.checkState(state != STATE_IDLE);
return chunkSource.getFormat(track);
return chunkSource.getTracks();
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(state == STATE_PREPARED);
Assertions.checkState(enabledTrackCount++ == 0);
state = STATE_ENABLED;
chunkSource.enable(track);
chunkSource.enable(tracks);
loadControl.register(this, bufferSizeContribution);
downstreamFormat = null;
downstreamMediaFormat = null;
......
......@@ -15,7 +15,7 @@
*/
package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import java.io.IOException;
import java.util.List;
......@@ -48,33 +48,23 @@ public interface ChunkSource {
boolean prepare();
/**
* Returns the number of tracks exposed by the source.
* Gets the group of tracks provided by the source.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The number of tracks.
* @return The track group.
*/
int getTrackCount();
TrackGroup getTracks();
/**
* Gets the format of the specified track.
* Enable the source for the specified tracks.
* <p>
* This method should only be called after the source has been prepared.
*
* @param track The track index.
* @return The format of the track.
*/
MediaFormat getFormat(int track);
/**
* Enable the source for the specified track.
* <p>
* This method should only be called after the source has been prepared, and when the source is
* This method should only be called after the source has been prepared and when the source is
* disabled.
*
* @param track The track index.
* @param tracks The track indices.
*/
void enable(int track);
void enable(int[] tracks);
/**
* Indicates to the source that it should still be checking for updates to the stream.
......
......@@ -17,6 +17,7 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
......@@ -34,7 +35,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private final DataSpec dataSpec;
private final Format format;
private final long durationUs;
private final MediaFormat mediaFormat;
private final TrackGroup tracks;
/**
* @param dataSource A {@link DataSource} suitable for loading the sample data.
......@@ -50,7 +51,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
this.dataSpec = dataSpec;
this.format = format;
this.durationUs = durationUs;
this.mediaFormat = mediaFormat;
tracks = new TrackGroup(mediaFormat);
}
@Override
......@@ -59,17 +60,12 @@ public final class SingleSampleChunkSource implements ChunkSource {
}
@Override
public int getTrackCount() {
return 1;
public TrackGroup getTracks() {
return tracks;
}
@Override
public MediaFormat getFormat(int track) {
return mediaFormat;
}
@Override
public void enable(int track) {
public void enable(int[] tracks) {
// Do nothing.
}
......@@ -111,7 +107,7 @@ public final class SingleSampleChunkSource implements ChunkSource {
private SingleSampleMediaChunk initChunk() {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0,
durationUs, 0, mediaFormat, null, Chunk.NO_PARENT_ID);
durationUs, 0, tracks.getFormat(0), null, Chunk.NO_PARENT_ID);
}
}
......@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TimeRange.DynamicTimeRange;
import com.google.android.exoplayer.TimeRange.StaticTimeRange;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
......@@ -33,7 +34,6 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.InitializationChunk;
import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.SingleSampleMediaChunk;
import com.google.android.exoplayer.dash.DashTrackSelector.Output;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.ContentProtection;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
......@@ -56,7 +56,6 @@ import android.util.Log;
import android.util.SparseArray;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
......@@ -75,7 +74,8 @@ import java.util.List;
* </ol>
*/
// TODO: handle cases where the above assumption are false
public class DashChunkSource implements ChunkSource, Output {
// TODO[REFACTOR]: Handle multiple adaptation sets of the same type (at a higher level).
public class DashChunkSource implements ChunkSource {
/**
* Interface definition for a callback to be notified of {@link DashChunkSource} events.
......@@ -108,12 +108,11 @@ public class DashChunkSource implements ChunkSource, Output {
private final Handler eventHandler;
private final EventListener eventListener;
private final int adaptationSetType;
private final DataSource dataSource;
private final FormatEvaluator adaptiveFormatEvaluator;
private final Evaluation evaluation;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final DashTrackSelector trackSelector;
private final ArrayList<ExposedTrack> tracks;
private final SparseArray<PeriodHolder> periodHolders;
private final Clock systemClock;
private final long liveEdgeLatencyUs;
......@@ -122,20 +121,28 @@ public class DashChunkSource implements ChunkSource, Output {
private final boolean live;
private final int eventSourceId;
private boolean prepareCalled;
private MediaPresentationDescription currentManifest;
private MediaPresentationDescription processedManifest;
private ExposedTrack enabledTrack;
private int nextPeriodHolderIndex;
private TimeRange availableRange;
private boolean prepareCalled;
private boolean startAtLiveEdge;
private boolean lastChunkWasInitialization;
private IOException fatalError;
// Properties of exposed tracks.
private int adaptationSetIndex;
private TrackGroup trackGroup;
private Format[] trackFormats;
// Properties of enabled tracks.
private Format[] enabledFormats;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
/**
* Lightweight constructor to use for fixed duration content.
*
* @param trackSelector Selects tracks to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content.
......@@ -144,17 +151,15 @@ public class DashChunkSource implements ChunkSource, Output {
* {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source.
*/
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType,
Representation... representations) {
this(trackSelector, dataSource, adaptiveFormatEvaluator, durationMs, adaptationSetType,
public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long durationMs, int adaptationSetType, Representation... representations) {
this(dataSource, adaptiveFormatEvaluator, durationMs, adaptationSetType,
Arrays.asList(representations));
}
/**
* Lightweight constructor to use for fixed duration content.
*
* @param trackSelector Selects tracks to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content.
......@@ -163,25 +168,26 @@ public class DashChunkSource implements ChunkSource, Output {
* {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source.
*/
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType,
List<Representation> representations) {
this(buildManifest(durationMs, adaptationSetType, representations), trackSelector, dataSource,
adaptiveFormatEvaluator);
public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long durationMs, int adaptationSetType, List<Representation> representations) {
this(buildManifest(durationMs, adaptationSetType, representations), adaptationSetType,
dataSource, adaptiveFormatEvaluator);
}
/**
* Constructor to use for fixed duration content.
*
* @param manifest The manifest.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
*/
public DashChunkSource(MediaPresentationDescription manifest, DashTrackSelector trackSelector,
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetType,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, new SystemClock(), 0,
0, false, null, null, 0);
this(null, manifest, adaptationSetType, dataSource, adaptiveFormatEvaluator, new SystemClock(),
0, 0, false, null, null, 0);
}
/**
......@@ -192,7 +198,9 @@ public class DashChunkSource implements ChunkSource, Output {
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
......@@ -209,10 +217,10 @@ public class DashChunkSource implements ChunkSource, Output {
* @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
Handler eventHandler, EventListener eventListener, int eventSourceId) {
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector,
int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, Handler eventHandler,
EventListener eventListener, int eventSourceId) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetType,
dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener, eventSourceId);
}
......@@ -222,7 +230,9 @@ public class DashChunkSource implements ChunkSource, Output {
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param adaptationSetType The type of the adaptation set exposed by this source. One of
* {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
......@@ -241,25 +251,24 @@ public class DashChunkSource implements ChunkSource, Output {
* @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener,
int eventSourceId) {
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector,
int adaptationSetType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, boolean startAtLiveEdge,
Handler eventHandler, EventListener eventListener, int eventSourceId) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetType,
dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener,
eventSourceId);
}
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, DashTrackSelector trackSelector,
MediaPresentationDescription initialManifest, int adaptationSetType,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener,
int eventSourceId) {
this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest;
this.trackSelector = trackSelector;
this.adaptationSetType = adaptationSetType;
this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.systemClock = systemClock;
......@@ -272,7 +281,6 @@ public class DashChunkSource implements ChunkSource, Output {
this.evaluation = new Evaluation();
this.availableRangeValues = new long[2];
periodHolders = new SparseArray<>();
tracks = new ArrayList<>();
live = initialManifest.dynamic;
}
......@@ -291,30 +299,34 @@ public class DashChunkSource implements ChunkSource, Output {
public boolean prepare() {
if (!prepareCalled) {
prepareCalled = true;
try {
trackSelector.selectTracks(currentManifest, 0, this);
} catch (IOException e) {
fatalError = e;
}
selectTracks(currentManifest, 0);
}
return fatalError == null;
}
@Override
public int getTrackCount() {
return tracks.size();
return true;
}
@Override
public final MediaFormat getFormat(int track) {
return tracks.get(track).trackFormat;
public final TrackGroup getTracks() {
return trackGroup;
}
@Override
public void enable(int track) {
enabledTrack = tracks.get(track);
if (enabledTrack.isAdaptive()) {
public void enable(int[] tracks) {
int maxWidth = -1;
int maxHeight = -1;
enabledFormats = new Format[tracks.length];
for (int i = 0; i < tracks.length; i++) {
enabledFormats[i] = trackFormats[tracks[i]];
maxWidth = Math.max(enabledFormats[i].width, maxWidth);
maxHeight = Math.max(enabledFormats[i].height, maxHeight);
}
Arrays.sort(enabledFormats, new DecreasingBandwidthComparator());
if (enabledFormats.length > 1) {
adaptiveMaxWidth = maxWidth;
adaptiveMaxHeight = maxHeight;
adaptiveFormatEvaluator.enable();
} else {
adaptiveMaxWidth = -1;
adaptiveMaxHeight = -1;
}
if (manifestFetcher != null) {
manifestFetcher.enable();
......@@ -363,11 +375,10 @@ public class DashChunkSource implements ChunkSource, Output {
evaluation.queueSize = queue.size();
if (evaluation.format == null || !lastChunkWasInitialization) {
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats,
evaluation);
if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledFormats, evaluation);
} else {
evaluation.format = enabledTrack.fixedFormat;
evaluation.format = enabledFormats[0];
evaluation.trigger = Chunk.TRIGGER_MANUAL;
}
}
......@@ -488,7 +499,7 @@ public class DashChunkSource implements ChunkSource, Output {
: startingNewPeriod ? representationHolder.getFirstAvailableSegmentNum()
: queue.get(out.queueSize - 1).getNextChunkIndex();
Chunk nextMediaChunk = newMediaChunk(periodHolder, representationHolder, dataSource,
mediaFormat, enabledTrack, segmentNum, evaluation.trigger);
mediaFormat, adaptiveMaxWidth, adaptiveMaxHeight, segmentNum, evaluation.trigger);
lastChunkWasInitialization = false;
out.chunk = nextMediaChunk;
}
......@@ -529,7 +540,7 @@ public class DashChunkSource implements ChunkSource, Output {
@Override
public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) {
if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.disable();
}
if (manifestFetcher != null) {
......@@ -539,72 +550,53 @@ public class DashChunkSource implements ChunkSource, Output {
evaluation.format = null;
availableRange = null;
fatalError = null;
enabledTrack = null;
enabledFormats = null;
}
// DashTrackSelector.Output implementation.
// Private methods.
@Override
public void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
int adaptationSetIndex, int[] representationIndices) {
if (adaptiveFormatEvaluator == null) {
Log.w(TAG, "Skipping adaptive track (missing format evaluator)");
return;
}
AdaptationSet adaptationSet = manifest.getPeriod(periodIndex).adaptationSets.get(
adaptationSetIndex);
int maxWidth = 0;
int maxHeight = 0;
Format maxHeightRepresentationFormat = null;
Format[] representationFormats = new Format[representationIndices.length];
for (int i = 0; i < representationFormats.length; i++) {
Format format = adaptationSet.representations.get(representationIndices[i]).format;
if (maxHeightRepresentationFormat == null || format.height > maxHeight) {
maxHeightRepresentationFormat = format;
private void selectTracks(MediaPresentationDescription manifest, int periodIndex) {
Period period = manifest.getPeriod(periodIndex);
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
if (adaptationSet.type == adaptationSetType) {
// We've found an adaptation set of the exposed type.
adaptationSetIndex = i;
List<Representation> representations = adaptationSet.representations;
trackFormats = new Format[representations.size()];
MediaFormat[] trackMediaFormats = new MediaFormat[representations.size()];
int trackCount = 0;
for (int j = 0; j < trackMediaFormats.length; j++) {
trackMediaFormats[trackCount] = getMediaFormat(manifest, representations.get(j).format);
if (trackMediaFormats[trackCount] != null) {
trackFormats[trackCount++] = representations.get(j).format;
}
}
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null,
Arrays.copyOf(trackMediaFormats, trackCount));
return;
}
maxWidth = Math.max(maxWidth, format.width);
maxHeight = Math.max(maxHeight, format.height);
representationFormats[i] = format;
}
Arrays.sort(representationFormats, new DecreasingBandwidthComparator());
long trackDurationUs = live ? C.UNKNOWN_TIME_US : manifest.duration * 1000;
String mediaMimeType = getMediaMimeType(maxHeightRepresentationFormat);
if (mediaMimeType == null) {
Log.w(TAG, "Skipped adaptive track (unknown media mime type)");
return;
}
MediaFormat trackFormat = getTrackFormat(adaptationSet.type, maxHeightRepresentationFormat,
mediaMimeType, trackDurationUs);
if (trackFormat == null) {
Log.w(TAG, "Skipped adaptive track (unknown media format)");
return;
}
tracks.add(new ExposedTrack(trackFormat.copyAsAdaptive(null), adaptationSetIndex,
representationFormats, maxWidth, maxHeight));
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null);
trackFormats = new Format[0];
}
@Override
public void fixedTrack(MediaPresentationDescription manifest, int periodIndex,
int adaptationSetIndex, int representationIndex) {
List<AdaptationSet> adaptationSets = manifest.getPeriod(periodIndex).adaptationSets;
AdaptationSet adaptationSet = adaptationSets.get(adaptationSetIndex);
Format representationFormat = adaptationSet.representations.get(representationIndex).format;
private MediaFormat getMediaFormat(MediaPresentationDescription manifest,
Format representationFormat) {
String mediaMimeType = getMediaMimeType(representationFormat);
if (mediaMimeType == null) {
Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media mime type)");
return;
return null;
}
MediaFormat trackFormat = getTrackFormat(adaptationSet.type, representationFormat,
MediaFormat trackFormat = getTrackFormat(adaptationSetType, representationFormat,
mediaMimeType, manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000);
if (trackFormat == null) {
Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media format)");
return;
return null;
}
tracks.add(new ExposedTrack(trackFormat, adaptationSetIndex, representationFormat));
return trackFormat;
}
// Private methods.
// Visible for testing.
/* package */ TimeRange getAvailableRange() {
return availableRange;
......@@ -629,8 +621,8 @@ public class DashChunkSource implements ChunkSource, Output {
MediaFormat.NO_VALUE, durationUs, format.audioChannels, format.audioSamplingRate, null,
format.language);
case AdaptationSet.TYPE_TEXT:
return MediaFormat.createTextFormat(format.id, mediaMimeType, format.bitrate,
durationUs, format.language);
return MediaFormat.createTextFormat(format.id, mediaMimeType, format.bitrate, durationUs,
format.language);
default:
return null;
}
......@@ -680,9 +672,9 @@ public class DashChunkSource implements ChunkSource, Output {
extractor, manifestIndex);
}
protected Chunk newMediaChunk(
PeriodHolder periodHolder, RepresentationHolder representationHolder, DataSource dataSource,
MediaFormat mediaFormat, ExposedTrack enabledTrack, int segmentNum, int trigger) {
protected Chunk newMediaChunk(PeriodHolder periodHolder,
RepresentationHolder representationHolder, DataSource dataSource, MediaFormat mediaFormat,
int adaptiveMaxWidth, int adaptiveMaxHeight, int segmentNum, int trigger) {
Representation representation = representationHolder.representation;
Format format = representation.format;
long startTimeUs = representationHolder.getSegmentStartTimeUs(segmentNum);
......@@ -694,13 +686,12 @@ public class DashChunkSource implements ChunkSource, Output {
long sampleOffsetUs = periodHolder.startTimeUs - representation.presentationTimeOffsetUs;
if (mimeTypeIsRawText(format.mimeType)) {
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL, format,
startTimeUs, endTimeUs, segmentNum, enabledTrack.trackFormat, null,
periodHolder.localIndex);
startTimeUs, endTimeUs, segmentNum, mediaFormat, null, periodHolder.localIndex);
} else {
boolean isMediaFormatFinal = (mediaFormat != null);
return new ContainerMediaChunk(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs,
segmentNum, sampleOffsetUs, representationHolder.extractorWrapper, mediaFormat,
enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight, periodHolder.drmInitData,
adaptiveMaxWidth, adaptiveMaxHeight, periodHolder.drmInitData,
isMediaFormatFinal, periodHolder.localIndex);
}
}
......@@ -752,10 +743,10 @@ public class DashChunkSource implements ChunkSource, Output {
try {
int periodHolderCount = periodHolders.size();
if (periodHolderCount > 0) {
periodHolders.valueAt(0).updatePeriod(manifest, 0, enabledTrack);
periodHolders.valueAt(0).updatePeriod(manifest, 0, adaptationSetIndex);
if (periodHolderCount > 1) {
int lastIndex = periodHolderCount - 1;
periodHolders.valueAt(lastIndex).updatePeriod(manifest, lastIndex, enabledTrack);
periodHolders.valueAt(lastIndex).updatePeriod(manifest, lastIndex, adaptationSetIndex);
}
}
} catch (BehindLiveWindowException e) {
......@@ -765,7 +756,8 @@ public class DashChunkSource implements ChunkSource, Output {
// Add new periods.
for (int i = periodHolders.size(); i < manifest.getPeriodCount(); i++) {
PeriodHolder holder = new PeriodHolder(nextPeriodHolderIndex, manifest, i, enabledTrack);
PeriodHolder holder = new PeriodHolder(nextPeriodHolderIndex, manifest, i, adaptationSetIndex,
enabledFormats);
periodHolders.put(nextPeriodHolderIndex, holder);
nextPeriodHolderIndex++;
}
......@@ -813,45 +805,6 @@ public class DashChunkSource implements ChunkSource, Output {
// Protected classes.
/**
 * A single track exposed by the source: either one fixed representation, or an adaptive
 * selection over several representations within one adaptation set.
 */
protected static final class ExposedTrack {

  public final MediaFormat trackFormat;
  public final int adaptiveMaxWidth;
  public final int adaptiveMaxHeight;

  private final int adaptationSetIndex;
  // Set only for non-adaptive (fixed) tracks; null otherwise.
  private final Format fixedFormat;
  // Set only for adaptive tracks; null otherwise.
  private final Format[] adaptiveFormats;

  /**
   * Builds a fixed (non-adaptive) track backed by a single representation.
   *
   * @param trackFormat The format exposed for the track.
   * @param adaptationSetIndex The index of the adaptation set containing the representation.
   * @param fixedFormat The format of the single backing representation.
   */
  public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format fixedFormat) {
    this.adaptationSetIndex = adaptationSetIndex;
    this.trackFormat = trackFormat;
    this.fixedFormat = fixedFormat;
    // No adaptive state for a fixed track.
    this.adaptiveFormats = null;
    this.adaptiveMaxHeight = -1;
    this.adaptiveMaxWidth = -1;
  }

  /**
   * Builds an adaptive track backed by multiple representations.
   *
   * @param trackFormat The format exposed for the track.
   * @param adaptationSetIndex The index of the adaptation set containing the representations.
   * @param adaptiveFormats The formats of the backing representations.
   * @param maxWidth The maximum video width across the backing representations.
   * @param maxHeight The maximum video height across the backing representations.
   */
  public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format[] adaptiveFormats,
      int maxWidth, int maxHeight) {
    this.adaptationSetIndex = adaptationSetIndex;
    this.trackFormat = trackFormat;
    this.adaptiveFormats = adaptiveFormats;
    this.adaptiveMaxHeight = maxHeight;
    this.adaptiveMaxWidth = maxWidth;
    // No fixed format for an adaptive track.
    this.fixedFormat = null;
  }

  /**
   * Returns whether this track adapts between multiple representations.
   */
  public boolean isAdaptive() {
    return adaptiveFormats != null;
  }

}
protected static final class RepresentationHolder {
public final boolean mimeTypeIsRawText;
......@@ -966,26 +919,25 @@ public class DashChunkSource implements ChunkSource, Output {
private long availableEndTimeUs;
public PeriodHolder(int localIndex, MediaPresentationDescription manifest, int manifestIndex,
ExposedTrack selectedTrack) {
int adaptationSetIndex, Format[] enabledFormats) {
this.localIndex = localIndex;
Period period = manifest.getPeriod(manifestIndex);
long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
AdaptationSet adaptationSet = period.adaptationSets.get(selectedTrack.adaptationSetIndex);
AdaptationSet adaptationSet = period.adaptationSets.get(adaptationSetIndex);
List<Representation> representations = adaptationSet.representations;
startTimeUs = period.startMs * 1000;
drmInitData = getDrmInitData(adaptationSet);
if (!selectedTrack.isAdaptive()) {
representationIndices = new int[] {
getRepresentationIndex(representations, selectedTrack.fixedFormat.id)};
} else {
representationIndices = new int[selectedTrack.adaptiveFormats.length];
for (int j = 0; j < selectedTrack.adaptiveFormats.length; j++) {
representationIndices[j] = getRepresentationIndex(
representations, selectedTrack.adaptiveFormats[j].id);
if (enabledFormats.length > 1) {
representationIndices = new int[enabledFormats.length];
for (int j = 0; j < enabledFormats.length; j++) {
representationIndices[j] = getRepresentationIndex(representations, enabledFormats[j].id);
}
} else {
representationIndices = new int[] {
getRepresentationIndex(representations, enabledFormats[0].id)};
}
representationHolders = new HashMap<>();
......@@ -1000,11 +952,11 @@ public class DashChunkSource implements ChunkSource, Output {
}
public void updatePeriod(MediaPresentationDescription manifest, int manifestIndex,
ExposedTrack selectedTrack) throws BehindLiveWindowException {
int adaptationSetIndex) throws BehindLiveWindowException {
Period period = manifest.getPeriod(manifestIndex);
long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
List<Representation> representations = period.adaptationSets
.get(selectedTrack.adaptationSetIndex).representations;
.get(adaptationSetIndex).representations;
for (int j = 0; j < representationIndices.length; j++) {
Representation representation = representations.get(representationIndices[j]);
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link Period} of a media presentation description.
 */
public interface DashTrackSelector {

  /**
   * Defines a selector output, which receives the tracks chosen by
   * {@link DashTrackSelector#selectTracks(MediaPresentationDescription, int, Output)}.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified representations in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the representations
     *     are located.
     * @param representationIndices The indices of the representations within the adaptation set.
     */
    void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
        int adaptationSetIndex, int[] representationIndices);

    /**
     * Outputs a fixed track corresponding to the specified representation in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the track is located.
     * @param representationIndex The index of the representation within the adaptation set.
     */
    void fixedTrack(MediaPresentationDescription manifest, int periodIndex, int adaptationSetIndex,
        int representationIndex);

  }

  /**
   * Outputs a track selection for a given period.
   *
   * @param manifest The media presentation description to process.
   * @param periodIndex The index of the period to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the period.
   */
  void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
/**
 * A default {@link DashTrackSelector} implementation.
 * <p>
 * Scans the adaptation sets of a period and, for each set matching the configured type, outputs
 * one fixed track per (optionally filtered) representation, plus an adaptive track whenever more
 * than one representation is available for adaptation.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultDashTrackSelector implements DashTrackSelector {

  private final int adaptationSetType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video adaptation sets.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   */
  public static DefaultDashTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /**
   * Creates a selector for audio adaptation sets. No device-capability filtering is applied.
   */
  public static DefaultDashTrackSelector newAudioInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
  }

  /**
   * Creates a selector for text adaptation sets. No device-capability filtering is applied.
   */
  public static DefaultDashTrackSelector newTextInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
  }

  private DefaultDashTrackSelector(int adaptationSetType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.adaptationSetType = adaptationSetType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException {
    Period period = manifest.getPeriod(periodIndex);
    int adaptationSetCount = period.adaptationSets.size();
    for (int setIndex = 0; setIndex < adaptationSetCount; setIndex++) {
      AdaptationSet adaptationSet = period.adaptationSets.get(setIndex);
      if (adaptationSet.type != adaptationSetType) {
        // Not the type this selector exposes; keep scanning.
        continue;
      }
      if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
        outputVideoTracks(manifest, periodIndex, setIndex, adaptationSet, output);
      } else {
        // Audio/text: output every representation as a fixed track.
        int representationCount = adaptationSet.representations.size();
        for (int repIndex = 0; repIndex < representationCount; repIndex++) {
          output.fixedTrack(manifest, periodIndex, setIndex, repIndex);
        }
      }
    }
  }

  // Outputs an adaptive track (when more than one representation survives filtering) followed by
  // a fixed track for each selected representation.
  private void outputVideoTracks(MediaPresentationDescription manifest, int periodIndex,
      int adaptationSetIndex, AdaptationSet adaptationSet, Output output) {
    int[] selectedIndices;
    if (filterVideoRepresentations) {
      // Restrict to representations the device's default display can handle, optionally dropping
      // protected HD content.
      selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, adaptationSet.representations, null,
          filterProtectedHdContent && adaptationSet.hasContentProtection());
    } else {
      selectedIndices = Util.firstIntegersArray(adaptationSet.representations.size());
    }
    if (selectedIndices.length > 1) {
      output.adaptiveTrack(manifest, periodIndex, adaptationSetIndex, selectedIndices);
    }
    for (int i = 0; i < selectedIndices.length; i++) {
      output.fixedTrack(manifest, periodIndex, adaptationSetIndex, selectedIndices[i]);
    }
  }

}
......@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DataSource;
......@@ -174,7 +175,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
private boolean prepared;
private int enabledTrackCount;
private MediaFormat[] mediaFormats;
private TrackGroup[] tracks;
private long durationUs;
private boolean[] pendingMediaFormat;
private boolean[] pendingResets;
......@@ -262,14 +263,14 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
if (seekMap != null && tracksBuilt && haveFormatsForAllTracks()) {
int trackCount = sampleQueues.size();
tracks = new TrackGroup[trackCount];
trackEnabledStates = new boolean[trackCount];
pendingResets = new boolean[trackCount];
pendingMediaFormat = new boolean[trackCount];
mediaFormats = new MediaFormat[trackCount];
durationUs = C.UNKNOWN_TIME_US;
for (int i = 0; i < trackCount; i++) {
MediaFormat format = sampleQueues.valueAt(i).getFormat();
mediaFormats[i] = format;
tracks[i] = new TrackGroup(format);
if (format.durationUs > durationUs) {
durationUs = format.durationUs;
}
......@@ -292,24 +293,23 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
}
@Override
public int getTrackCount() {
return sampleQueues.size();
public int getTrackGroupCount() {
return tracks.length;
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return mediaFormats[track];
public TrackGroup getTrackGroup(int group) {
return tracks[group];
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared);
Assertions.checkState(!trackEnabledStates[track]);
Assertions.checkState(!trackEnabledStates[group]);
enabledTrackCount++;
trackEnabledStates[track] = true;
pendingMediaFormat[track] = true;
pendingResets[track] = false;
trackEnabledStates[group] = true;
pendingMediaFormat[group] = true;
pendingResets[group] = false;
if (enabledTrackCount == 1) {
// Treat all enables in non-seekable media as being from t=0.
positionUs = !seekMap.isSeekable() ? 0 : positionUs;
......@@ -317,7 +317,7 @@ public final class ExtractorSampleSource implements SampleSource, ExtractorOutpu
lastSeekPositionUs = positionUs;
restartFrom(positionUs);
}
return new TrackStreamImpl(track);
return new TrackStreamImpl(group);
}
/* package */ void disable(int track) {
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import android.content.Context;
import android.text.TextUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A default {@link HlsTrackSelector} implementation.
*/
public final class DefaultHlsTrackSelector implements HlsTrackSelector {

  private static final int TYPE_DEFAULT = 0;
  private static final int TYPE_VTT = 1;

  // Used only when filtering variants for the default display; null for the VTT selector.
  private final Context context;
  private final int type;

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects the streams defined in the playlist.
   *
   * @param context A context.
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newDefaultInstance(Context context) {
    return new DefaultHlsTrackSelector(context, TYPE_DEFAULT);
  }

  /**
   * Creates a {@link DefaultHlsTrackSelector} that selects subtitle renditions.
   *
   * @return The selector instance.
   */
  public static DefaultHlsTrackSelector newVttInstance() {
    return new DefaultHlsTrackSelector(null, TYPE_VTT);
  }

  private DefaultHlsTrackSelector(Context context, int type) {
    this.context = context;
    this.type = type;
  }

  @Override
  public void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException {
    if (type == TYPE_VTT) {
      selectVttTracks(playlist, output);
    } else {
      selectDefaultTracks(playlist, output);
    }
  }

  // Exposes each subtitle rendition in the master playlist as a fixed track.
  private static void selectVttTracks(HlsMasterPlaylist playlist, Output output)
      throws IOException {
    List<Variant> subtitleVariants = playlist.subtitles;
    if (subtitleVariants == null || subtitleVariants.isEmpty()) {
      return;
    }
    for (int i = 0; i < subtitleVariants.size(); i++) {
      output.fixedTrack(playlist, subtitleVariants.get(i));
    }
  }

  // Selects the default (audio/video) tracks, filtering out variants that are likely to be
  // audio only when the playlist also appears to contain video.
  private void selectDefaultTracks(HlsMasterPlaylist playlist, Output output) throws IOException {
    // Filter the variants according to the capabilities of the default display.
    int[] selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
        context, playlist.variants, null, false);
    ArrayList<Variant> enabledVariantList = new ArrayList<>();
    for (int selectedIndex : selectedIndices) {
      enabledVariantList.add(playlist.variants.get(selectedIndex));
    }

    // Partition the variants into those that definitely contain video and those that are
    // definitely audio only, based on resolution tags and explicit codec strings.
    ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
    ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
    for (Variant variant : enabledVariantList) {
      if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
        definiteVideoVariants.add(variant);
      } else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
        definiteAudioOnlyVariants.add(variant);
      }
    }

    if (!definiteVideoVariants.isEmpty()) {
      // We've identified some variants as definitely containing video. Assume variants within the
      // master playlist are marked consistently, and hence that we have the full set. Filter out
      // any other variants, which are likely to be audio only.
      enabledVariantList = definiteVideoVariants;
    } else if (definiteAudioOnlyVariants.size() < enabledVariantList.size()) {
      // We've identified some variants, but not all, as being audio only. Filter them out to
      // leave the remaining variants, which are likely to contain video.
      enabledVariantList.removeAll(definiteAudioOnlyVariants);
    }
    // Otherwise leave the enabled variants unchanged; they're likely all video or all audio.

    // Expose an adaptive track when there is something to adapt between, followed by a fixed
    // track per variant.
    if (enabledVariantList.size() > 1) {
      Variant[] adaptiveVariants = new Variant[enabledVariantList.size()];
      enabledVariantList.toArray(adaptiveVariants);
      output.adaptiveTrack(playlist, adaptiveVariants);
    }
    for (int i = 0; i < enabledVariantList.size(); i++) {
      output.fixedTrack(playlist, enabledVariantList.get(i));
    }
  }

  // Returns whether the variant's codecs attribute explicitly lists a codec starting with the
  // given prefix.
  private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
    String codecs = variant.format.codecs;
    if (TextUtils.isEmpty(codecs)) {
      return false;
    }
    for (String codec : codecs.split("(\\s*,\\s*)|(\\s*$)")) {
      if (codec.startsWith(prefix)) {
        return true;
      }
    }
    return false;
  }
}
......@@ -39,6 +39,7 @@ import com.google.android.exoplayer.util.Util;
import android.net.Uri;
import android.os.SystemClock;
import android.text.TextUtils;
import android.util.Log;
import java.io.ByteArrayInputStream;
......@@ -54,13 +55,16 @@ import java.util.Locale;
/**
* A temporary test source of HLS chunks.
*/
public class HlsChunkSource implements HlsTrackSelector.Output {
public class HlsChunkSource {
/**
* Interface definition for a callback to be notified of {@link HlsChunkSource} events.
*/
public interface EventListener extends BaseChunkSampleSourceEventListener {}
public static final int TYPE_DEFAULT = 0;
public static final int TYPE_VTT = 1;
/**
* Adaptive switching is disabled.
* <p>
......@@ -119,11 +123,10 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private static final String WEBVTT_FILE_EXTENSION = ".webvtt";
private static final float BANDWIDTH_FRACTION = 0.8f;
private final boolean isMaster;
private final int type;
private final DataSource dataSource;
private final HlsPlaylistParser playlistParser;
private final HlsMasterPlaylist masterPlaylist;
private final HlsTrackSelector trackSelector;
private final BandwidthMeter bandwidthMeter;
private final PtsTimestampAdjusterProvider timestampAdjusterProvider;
private final int adaptiveMode;
......@@ -131,22 +134,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private final long minBufferDurationToSwitchUpUs;
private final long maxBufferDurationToSwitchDownUs;
// TODO: Expose tracks.
private final ArrayList<ExposedTrack> tracks;
private int selectedTrackIndex;
// A list of variants considered during playback, ordered by decreasing bandwidth. The following
// three arrays are of the same length and are ordered in the same way (i.e. variantPlaylists[i],
// variantLastPlaylistLoadTimesMs[i] and variantBlacklistTimes[i] all correspond to variants[i]).
private Variant[] variants;
private HlsMediaPlaylist[] variantPlaylists;
private long[] variantLastPlaylistLoadTimesMs;
private long[] variantBlacklistTimes;
// The index in variants of the currently selected variant.
private int selectedVariantIndex;
private boolean prepareCalled;
private byte[] scratchSpace;
private boolean live;
......@@ -158,14 +145,24 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private String encryptionIvString;
private byte[] encryptionIv;
// Properties of exposed tracks.
private Variant[] exposedVariants;
// Properties of enabled variants.
private Variant[] enabledVariants;
private HlsMediaPlaylist[] enabledVariantPlaylists;
private long[] enabledVariantLastPlaylistLoadTimesMs;
private long[] enabledVariantBlacklistTimes;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
private int selectedVariantIndex;
/**
* @param isMaster True if this is the master source for the playback. False otherwise. Each
* playback must have exactly one master source, which should be the source providing video
* chunks (or audio chunks for audio only playbacks).
* @param type The type of chunk provided by the source. One of {@link #TYPE_DEFAULT} and
* {@link #TYPE_VTT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param playlistUrl The playlist URL.
* @param playlist The hls playlist.
* @param trackSelector Selects tracks to be exposed by this source.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth.
* @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If
* multiple {@link HlsChunkSource}s are used for a single playback, they should all share the
......@@ -174,22 +171,19 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* {@link #ADAPTIVE_MODE_NONE}, {@link #ADAPTIVE_MODE_ABRUPT} and
* {@link #ADAPTIVE_MODE_SPLICE}.
*/
public HlsChunkSource(boolean isMaster, DataSource dataSource, String playlistUrl,
HlsPlaylist playlist, HlsTrackSelector trackSelector, BandwidthMeter bandwidthMeter,
PtsTimestampAdjusterProvider timestampAdjusterProvider, int adaptiveMode) {
this(isMaster, dataSource, playlistUrl, playlist, trackSelector, bandwidthMeter,
timestampAdjusterProvider, adaptiveMode, DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS,
DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
public HlsChunkSource(int type, DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
BandwidthMeter bandwidthMeter, PtsTimestampAdjusterProvider timestampAdjusterProvider,
int adaptiveMode) {
this(type, dataSource, playlistUrl, playlist, bandwidthMeter, timestampAdjusterProvider,
adaptiveMode, DEFAULT_MIN_BUFFER_TO_SWITCH_UP_MS, DEFAULT_MAX_BUFFER_TO_SWITCH_DOWN_MS);
}
/**
* @param isMaster True if this is the master source for the playback. False otherwise. Each
* playback must have exactly one master source, which should be the source providing video
* chunks (or audio chunks for audio only playbacks).
* @param type The type of chunk provided by the source. One of {@link #TYPE_DEFAULT} and
* {@link #TYPE_VTT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param playlistUrl The playlist URL.
* @param playlist The hls playlist.
* @param trackSelector Selects tracks to be exposed by this source.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth.
* @param timestampAdjusterProvider A provider of {@link PtsTimestampAdjuster} instances. If
* multiple {@link HlsChunkSource}s are used for a single playback, they should all share the
......@@ -202,13 +196,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered
* for a switch to a lower quality variant to be considered.
*/
public HlsChunkSource(boolean isMaster, DataSource dataSource, String playlistUrl,
HlsPlaylist playlist, HlsTrackSelector trackSelector, BandwidthMeter bandwidthMeter,
PtsTimestampAdjusterProvider timestampAdjusterProvider, int adaptiveMode,
long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) {
this.isMaster = isMaster;
public HlsChunkSource(int type, DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
BandwidthMeter bandwidthMeter, PtsTimestampAdjusterProvider timestampAdjusterProvider,
int adaptiveMode, long minBufferDurationToSwitchUpMs, long maxBufferDurationToSwitchDownMs) {
this.type = type;
this.dataSource = dataSource;
this.trackSelector = trackSelector;
this.bandwidthMeter = bandwidthMeter;
this.timestampAdjusterProvider = timestampAdjusterProvider;
this.adaptiveMode = adaptiveMode;
......@@ -216,8 +208,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000;
baseUri = playlist.baseUri;
playlistParser = new HlsPlaylistParser();
tracks = new ArrayList<>();
if (playlist.type == HlsPlaylist.TYPE_MASTER) {
masterPlaylist = (HlsMasterPlaylist) playlist;
} else {
......@@ -250,14 +240,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
public boolean prepare() {
  // First call builds the list of exposed variants from the master playlist and applies a
  // default selection; subsequent calls are no-ops. The stale try/catch over the removed
  // trackSelector and the duplicate return were merge residue and have been removed.
  if (!prepareCalled) {
    prepareCalled = true;
    processMasterPlaylist(masterPlaylist);
    // TODO[REFACTOR]: Come up with a sane default here.
    selectTracks(new int[] {0});
  }
  return true;
}
/**
......@@ -290,50 +277,69 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* @return The number of tracks.
*/
public int getTrackCount() {
  // One exposed track per exposed variant. The duplicate return referencing the removed
  // "tracks" list was merge residue and has been removed.
  return exposedVariants.length;
}
/**
* Returns the variant corresponding to the fixed track at the specified index, or null if the
* track at the specified index is adaptive.
* Returns the format of the track at the specified index.
* <p>
* This method should only be called after the source has been prepared.
*
* @param index The track index.
* @return The variant corresponding to the fixed track, or null if the track is adaptive.
*/
public Variant getFixedTrackVariant(int index) {
Variant[] variants = tracks.get(index).variants;
return variants.length == 1 ? variants[0] : null;
}
/**
* Returns the currently selected track index.
* <p>
* This method should only be called after the source has been prepared.
*
* @return The currently selected track index.
* @return The format of the track.
*/
public int getSelectedTrackIndex() {
return selectedTrackIndex;
public Format getTrackFormat(int index) {
  // Exposed tracks map 1:1 to exposed variants, so the track format is the variant's format.
  return exposedVariants[index].format;
}
/**
* Selects a track for use.
* Selects a tracks for use.
* <p>
* This method should only be called after the source has been prepared.
*
* @param index The track index.
* @param tracks The track indices.
*/
public void selectTrack(int index) {
selectedTrackIndex = index;
ExposedTrack selectedTrack = tracks.get(selectedTrackIndex);
selectedVariantIndex = selectedTrack.defaultVariantIndex;
variants = selectedTrack.variants;
variantPlaylists = new HlsMediaPlaylist[variants.length];
variantLastPlaylistLoadTimesMs = new long[variants.length];
variantBlacklistTimes = new long[variants.length];
/**
 * Selects tracks for use.
 * <p>
 * This method should only be called after the source has been prepared.
 *
 * @param tracks The track indices.
 */
public void selectTracks(int[] tracks) {
  enabledVariants = new Variant[tracks.length];
  enabledVariantPlaylists = new HlsMediaPlaylist[enabledVariants.length];
  enabledVariantLastPlaylistLoadTimesMs = new long[enabledVariants.length];
  enabledVariantBlacklistTimes = new long[enabledVariants.length];
  // Construct and sort the enabled variants in order of decreasing bandwidth.
  for (int i = 0; i < tracks.length; i++) {
    enabledVariants[i] = exposedVariants[tracks[i]];
  }
  Arrays.sort(enabledVariants, new Comparator<Variant>() {
    private final Comparator<Format> formatComparator =
        new Format.DecreasingBandwidthComparator();
    @Override
    public int compare(Variant first, Variant second) {
      return formatComparator.compare(first.format, second.format);
    }
  });
  // Start playback with the variant that appeared first in the master playlist, and track the
  // maximum video dimensions across all enabled variants.
  selectedVariantIndex = 0;
  int maxWidth = -1;
  int maxHeight = -1;
  int minOriginalVariantIndex = Integer.MAX_VALUE;
  for (int i = 0; i < enabledVariants.length; i++) {
    int originalVariantIndex = masterPlaylist.variants.indexOf(enabledVariants[i]);
    if (originalVariantIndex < minOriginalVariantIndex) {
      minOriginalVariantIndex = originalVariantIndex;
      selectedVariantIndex = i;
    }
    Format variantFormat = enabledVariants[i].format;
    maxWidth = Math.max(variantFormat.width, maxWidth);
    maxHeight = Math.max(variantFormat.height, maxHeight);
  }
  if (tracks.length > 1) {
    // TODO: We should allow the default values to be passed through the constructor.
    // TODO: Print a warning if resolution tags are omitted.
    // FIX: the computed maxima were previously only stored in locals and never assigned to the
    // adaptiveMaxWidth/adaptiveMaxHeight fields, leaving them unset for adaptive selections.
    adaptiveMaxWidth = maxWidth > 0 ? maxWidth : 1920;
    adaptiveMaxHeight = maxHeight > 0 ? maxHeight : 1080;
  } else {
    // A single fixed track requires no adaptive dimension hinting.
    adaptiveMaxWidth = -1;
    adaptiveMaxHeight = -1;
  }
}
/**
......@@ -342,7 +348,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
* This method should only be called after the source has been prepared.
*/
public void seek() {
if (isMaster) {
if (type == TYPE_DEFAULT) {
timestampAdjusterProvider.reset();
}
}
......@@ -377,11 +383,11 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
} else {
nextVariantIndex = getNextVariantIndex(previousTsChunk, playbackPositionUs);
switchingVariantSpliced = previousTsChunk != null
&& !variants[nextVariantIndex].format.equals(previousTsChunk.format)
&& !enabledVariants[nextVariantIndex].format.equals(previousTsChunk.format)
&& adaptiveMode == ADAPTIVE_MODE_SPLICE;
}
HlsMediaPlaylist mediaPlaylist = variantPlaylists[nextVariantIndex];
HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[nextVariantIndex];
if (mediaPlaylist == null) {
// We don't have the media playlist for the next variant. Request it now.
out.chunk = newMediaPlaylistChunk(nextVariantIndex);
......@@ -459,7 +465,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
}
long endTimeUs = startTimeUs + (long) (segment.durationSecs * C.MICROS_PER_SECOND);
int trigger = Chunk.TRIGGER_UNSPECIFIED;
Format format = variants[selectedVariantIndex].format;
Format format = enabledVariants[selectedVariantIndex].format;
// Configure the extractor that will read the chunk.
HlsExtractorWrapper extractorWrapper;
......@@ -477,7 +483,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
switchingVariantSpliced, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE);
} else if (lastPathSegment.endsWith(WEBVTT_FILE_EXTENSION)
|| lastPathSegment.endsWith(VTT_FILE_EXTENSION)) {
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(isMaster,
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(false,
segment.discontinuitySequenceNumber, startTimeUs);
if (timestampAdjuster == null) {
// The master source has yet to instantiate an adjuster for the discontinuity sequence.
......@@ -492,16 +498,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
|| previousTsChunk.discontinuitySequenceNumber != segment.discontinuitySequenceNumber
|| !format.equals(previousTsChunk.format)) {
// MPEG-2 TS segments, but we need a new extractor.
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(isMaster,
PtsTimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(true,
segment.discontinuitySequenceNumber, startTimeUs);
if (timestampAdjuster == null) {
// The master source has yet to instantiate an adjuster for the discontinuity sequence.
return;
}
ExposedTrack selectedTrack = tracks.get(selectedTrackIndex);
Extractor extractor = new TsExtractor(timestampAdjuster);
extractorWrapper = new HlsExtractorWrapper(trigger, format, startTimeUs, extractor,
switchingVariantSpliced, selectedTrack.adaptiveMaxWidth, selectedTrack.adaptiveMaxHeight);
switchingVariantSpliced, adaptiveMaxWidth, adaptiveMaxHeight);
} else {
// MPEG-2 TS segments, and we need to continue using the same extractor.
extractorWrapper = previousTsChunk.extractorWrapper;
......@@ -558,8 +563,8 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
EncryptionKeyChunk encryptionChunk = (EncryptionKeyChunk) chunk;
variantIndex = encryptionChunk.variantIndex;
}
boolean alreadyBlacklisted = variantBlacklistTimes[variantIndex] != 0;
variantBlacklistTimes[variantIndex] = SystemClock.elapsedRealtime();
boolean alreadyBlacklisted = enabledVariantBlacklistTimes[variantIndex] != 0;
enabledVariantBlacklistTimes[variantIndex] = SystemClock.elapsedRealtime();
if (alreadyBlacklisted) {
// The playlist was already blacklisted.
Log.w(TAG, "Already blacklisted variant (" + responseCode + "): "
......@@ -574,7 +579,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
// This was the last non-blacklisted playlist. Don't blacklist it.
Log.w(TAG, "Final variant not blacklisted (" + responseCode + "): "
+ chunk.dataSpec.uri);
variantBlacklistTimes[variantIndex] = 0;
enabledVariantBlacklistTimes[variantIndex] = 0;
return false;
}
}
......@@ -582,52 +587,68 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
return false;
}
// HlsTrackSelector.Output implementation.
// Private methods.
@Override
public void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants) {
Arrays.sort(variants, new Comparator<Variant>() {
private final Comparator<Format> formatComparator =
new Format.DecreasingBandwidthComparator();
@Override
public int compare(Variant first, Variant second) {
return formatComparator.compare(first.format, second.format);
private void processMasterPlaylist(HlsMasterPlaylist playlist) {
if (type == TYPE_VTT) {
List<Variant> subtitleVariants = playlist.subtitles;
if (subtitleVariants != null) {
exposedVariants = new Variant[subtitleVariants.size()];
subtitleVariants.toArray(exposedVariants);
} else {
exposedVariants = new Variant[0];
}
});
int defaultVariantIndex = 0;
int maxWidth = -1;
int maxHeight = -1;
return;
}
int minOriginalVariantIndex = Integer.MAX_VALUE;
for (int i = 0; i < variants.length; i++) {
int originalVariantIndex = playlist.variants.indexOf(variants[i]);
if (originalVariantIndex < minOriginalVariantIndex) {
minOriginalVariantIndex = originalVariantIndex;
defaultVariantIndex = i;
// Type is TYPE_DEFAULT.
List<Variant> enabledVariantList = playlist.variants;
ArrayList<Variant> definiteVideoVariants = new ArrayList<>();
ArrayList<Variant> definiteAudioOnlyVariants = new ArrayList<>();
for (int i = 0; i < enabledVariantList.size(); i++) {
Variant variant = enabledVariantList.get(i);
if (variant.format.height > 0 || variantHasExplicitCodecWithPrefix(variant, "avc")) {
definiteVideoVariants.add(variant);
} else if (variantHasExplicitCodecWithPrefix(variant, "mp4a")) {
definiteAudioOnlyVariants.add(variant);
}
Format variantFormat = variants[i].format;
maxWidth = Math.max(variantFormat.width, maxWidth);
maxHeight = Math.max(variantFormat.height, maxHeight);
}
// TODO: We should allow the default values to be passed through the constructor.
// TODO: Print a warning if resolution tags are omitted.
maxWidth = maxWidth > 0 ? maxWidth : 1920;
maxHeight = maxHeight > 0 ? maxHeight : 1080;
tracks.add(new ExposedTrack(variants, defaultVariantIndex, maxWidth, maxHeight));
}
@Override
public void fixedTrack(HlsMasterPlaylist playlist, Variant variant) {
tracks.add(new ExposedTrack(variant));
if (!definiteVideoVariants.isEmpty()) {
// We've identified some variants as definitely containing video. Assume variants within the
// master playlist are marked consistently, and hence that we have the full set. Filter out
// any other variants, which are likely to be audio only.
enabledVariantList = definiteVideoVariants;
} else if (definiteAudioOnlyVariants.size() < enabledVariantList.size()) {
// We've identified some variants, but not all, as being audio only. Filter them out to leave
// the remaining variants, which are likely to contain video.
enabledVariantList.removeAll(definiteAudioOnlyVariants);
} else {
// Leave the enabled variants unchanged. They're likely either all video or all audio.
}
exposedVariants = new Variant[enabledVariantList.size()];
enabledVariantList.toArray(exposedVariants);
}
// Private methods.
// Returns whether the variant's codecs attribute explicitly lists a codec starting with the
// given prefix.
private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
  String codecs = variant.format.codecs;
  if (TextUtils.isEmpty(codecs)) {
    return false;
  }
  for (String codec : codecs.split("(\\s*,\\s*)|(\\s*$)")) {
    if (codec.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
private int getNextVariantIndex(TsChunk previousTsChunk, long playbackPositionUs) {
clearStaleBlacklistedVariants();
long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
if (variantBlacklistTimes[selectedVariantIndex] != 0) {
if (enabledVariantBlacklistTimes[selectedVariantIndex] != 0) {
// The current variant has been blacklisted, so we have no choice but to re-evaluate.
return getVariantIndexForBandwidth(bitrateEstimate);
}
......@@ -649,7 +670,7 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
long bufferedPositionUs = adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs
: previousTsChunk.endTimeUs;
long bufferedUs = bufferedPositionUs - playbackPositionUs;
if (variantBlacklistTimes[selectedVariantIndex] != 0
if (enabledVariantBlacklistTimes[selectedVariantIndex] != 0
|| (idealIndex > selectedVariantIndex && bufferedUs < maxBufferDurationToSwitchDownUs)
|| (idealIndex < selectedVariantIndex && bufferedUs > minBufferDurationToSwitchUpUs)) {
// Switch variant.
......@@ -666,9 +687,9 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
}
int effectiveBitrate = (int) (bitrateEstimate * BANDWIDTH_FRACTION);
int lowestQualityEnabledVariantIndex = -1;
for (int i = 0; i < variants.length; i++) {
if (variantBlacklistTimes[i] == 0) {
if (variants[i].format.bitrate <= effectiveBitrate) {
for (int i = 0; i < enabledVariants.length; i++) {
if (enabledVariantBlacklistTimes[i] == 0) {
if (enabledVariants[i].format.bitrate <= effectiveBitrate) {
return i;
}
lowestQualityEnabledVariantIndex = i;
......@@ -681,21 +702,21 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private boolean shouldRerequestLiveMediaPlaylist(int nextVariantIndex) {
// Don't re-request media playlist more often than one-half of the target duration.
HlsMediaPlaylist mediaPlaylist = variantPlaylists[nextVariantIndex];
HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[nextVariantIndex];
long timeSinceLastMediaPlaylistLoadMs =
SystemClock.elapsedRealtime() - variantLastPlaylistLoadTimesMs[nextVariantIndex];
SystemClock.elapsedRealtime() - enabledVariantLastPlaylistLoadTimesMs[nextVariantIndex];
return timeSinceLastMediaPlaylistLoadMs >= (mediaPlaylist.targetDurationSecs * 1000) / 2;
}
private int getLiveStartChunkMediaSequence(int variantIndex) {
  // For live, start playback from the third chunk from the end. The duplicate declaration
  // referencing the removed variantPlaylists field was merge residue.
  HlsMediaPlaylist mediaPlaylist = enabledVariantPlaylists[variantIndex];
  int chunkIndex = mediaPlaylist.segments.size() > 3 ? mediaPlaylist.segments.size() - 3 : 0;
  return chunkIndex + mediaPlaylist.mediaSequence;
}
private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) {
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, variants[variantIndex].url);
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, enabledVariants[variantIndex].url);
DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null,
DataSpec.FLAG_ALLOW_GZIP);
return new MediaPlaylistChunk(dataSource, dataSpec, scratchSpace, playlistParser, variantIndex,
......@@ -735,15 +756,15 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
}
private void setMediaPlaylist(int variantIndex, HlsMediaPlaylist mediaPlaylist) {
  // Record the load time so live playlist re-requests can be rate limited. The duplicate
  // assignments to the removed variant* fields were merge residue.
  enabledVariantLastPlaylistLoadTimesMs[variantIndex] = SystemClock.elapsedRealtime();
  enabledVariantPlaylists[variantIndex] = mediaPlaylist;
  live |= mediaPlaylist.live;
  durationUs = live ? C.UNKNOWN_TIME_US : mediaPlaylist.durationUs;
}
private boolean allVariantsBlacklisted() {
for (int i = 0; i < variantBlacklistTimes.length; i++) {
if (variantBlacklistTimes[i] == 0) {
for (int i = 0; i < enabledVariantBlacklistTimes.length; i++) {
if (enabledVariantBlacklistTimes[i] == 0) {
return false;
}
}
......@@ -752,17 +773,17 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
private void clearStaleBlacklistedVariants() {
long currentTime = SystemClock.elapsedRealtime();
for (int i = 0; i < variantBlacklistTimes.length; i++) {
if (variantBlacklistTimes[i] != 0
&& currentTime - variantBlacklistTimes[i] > DEFAULT_PLAYLIST_BLACKLIST_MS) {
variantBlacklistTimes[i] = 0;
for (int i = 0; i < enabledVariantBlacklistTimes.length; i++) {
if (enabledVariantBlacklistTimes[i] != 0
&& currentTime - enabledVariantBlacklistTimes[i] > DEFAULT_PLAYLIST_BLACKLIST_MS) {
enabledVariantBlacklistTimes[i] = 0;
}
}
}
private int getVariantIndex(Format format) {
for (int i = 0; i < variants.length; i++) {
if (variants[i].format.equals(format)) {
for (int i = 0; i < enabledVariants.length; i++) {
if (enabledVariants[i].format.equals(format)) {
return i;
}
}
......@@ -772,31 +793,6 @@ public class HlsChunkSource implements HlsTrackSelector.Output {
// Private classes.
/** An immutable value type describing one track exposed by the source. */
private static final class ExposedTrack {

  private final Variant[] variants;
  private final int defaultVariantIndex;
  private final int adaptiveMaxWidth;
  private final int adaptiveMaxHeight;

  /** Creates a fixed track backed by a single variant, with no adaptive dimensions. */
  public ExposedTrack(Variant fixedVariant) {
    this(new Variant[] {fixedVariant}, 0, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE);
  }

  /**
   * Creates an adaptive track backed by multiple variants.
   *
   * @param adaptiveVariants The variants to adapt between.
   * @param defaultVariantIndex The index of the variant selected initially.
   * @param maxWidth The maximum video width across the variants.
   * @param maxHeight The maximum video height across the variants.
   */
  public ExposedTrack(Variant[] adaptiveVariants, int defaultVariantIndex, int maxWidth,
      int maxHeight) {
    variants = adaptiveVariants;
    this.defaultVariantIndex = defaultVariantIndex;
    adaptiveMaxWidth = maxWidth;
    adaptiveMaxHeight = maxHeight;
  }

}
private static final class MediaPlaylistChunk extends DataChunk {
public final int variantIndex;
......
......@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
......@@ -55,9 +56,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private static final long NO_RESET_PENDING = Long.MIN_VALUE;
private static final int PRIMARY_TYPE_NONE = 0;
private static final int PRIMARY_TYPE_TEXT = 1;
private static final int PRIMARY_TYPE_AUDIO = 2;
private static final int PRIMARY_TYPE_VIDEO = 3;
private static final int PRIMARY_TYPE_AUDIO = 1;
private static final int PRIMARY_TYPE_VIDEO = 2;
private final HlsChunkSource chunkSource;
private final LinkedList<HlsExtractorWrapper> extractors;
......@@ -72,24 +72,19 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private boolean prepared;
private boolean loadControlRegistered;
private int trackCount;
private int enabledTrackCount;
private Format downstreamFormat;
// Tracks are complicated in HLS. See documentation of buildTracks for details.
// Indexed by track (as exposed by this source).
private MediaFormat[] trackFormats;
private boolean[] trackEnabledStates;
private TrackGroup[] trackGroups;
private int primaryTrackGroupIndex;
private int[] primarySelectedTracks;
// Indexed by group.
private boolean[] groupEnabledStates;
private boolean[] pendingResets;
private MediaFormat[] downstreamMediaFormats;
// Maps track index (as exposed by this source) to the corresponding chunk source track index for
// primary tracks, or to -1 otherwise.
private int[] chunkSourceTrackIndices;
// Maps track index (as exposed by this source) to the corresponding extractor track index.
private int[] extractorTrackIndices;
// Indexed by extractor track index.
private boolean[] extractorTrackEnabledStates;
private long downstreamPositionUs;
private long lastSeekPositionUs;
......@@ -186,23 +181,22 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return trackCount;
public int getTrackGroupCount() {
return trackGroups.length;
}
@Override
public MediaFormat getFormat(int track) {
public TrackGroup getTrackGroup(int group) {
Assertions.checkState(prepared);
return trackFormats[track];
return trackGroups[group];
}
@Override
public TrackStream enable(int track, long positionUs) {
public TrackStream enable(int group, int[] tracks, long positionUs) {
Assertions.checkState(prepared);
setTrackEnabledState(track, true);
downstreamMediaFormats[track] = null;
pendingResets[track] = false;
setTrackGroupEnabledState(group, true);
downstreamMediaFormats[group] = null;
pendingResets[group] = false;
downstreamFormat = null;
boolean wasLoadControlRegistered = loadControlRegistered;
if (!loadControlRegistered) {
......@@ -211,13 +205,13 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
}
// Treat enabling of a live stream as occurring at t=0 in both of the blocks below.
positionUs = chunkSource.isLive() ? 0 : positionUs;
int chunkSourceTrack = chunkSourceTrackIndices[track];
if (chunkSourceTrack != -1 && chunkSourceTrack != chunkSource.getSelectedTrackIndex()) {
if (group == primaryTrackGroupIndex && !Arrays.equals(tracks, primarySelectedTracks)) {
// This is a primary track whose corresponding chunk source track is different to the one
// currently selected. We need to change the selection and restart. Since other exposed tracks
// may be enabled too, we need to implement the restart as a seek so that all downstream
// renderers receive a discontinuity event.
chunkSource.selectTrack(chunkSourceTrack);
chunkSource.selectTracks(tracks);
primarySelectedTracks = tracks;
seekToInternal(positionUs);
} else if (enabledTrackCount == 1) {
lastSeekPositionUs = positionUs;
......@@ -232,12 +226,12 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
restartFrom(positionUs);
}
}
return new TrackStreamImpl(track);
return new TrackStreamImpl(group);
}
/* package */ void disable(int track) {
/* package */ void disable(int group) {
Assertions.checkState(prepared);
setTrackEnabledState(track, false);
setTrackGroupEnabledState(group, false);
if (enabledTrackCount == 0) {
chunkSource.reset();
downstreamPositionUs = Long.MIN_VALUE;
......@@ -267,8 +261,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
maybeStartLoading();
}
/* package */ boolean isReady(int track) {
Assertions.checkState(trackEnabledStates[track]);
/* package */ boolean isReady(int group) {
Assertions.checkState(groupEnabledStates[group]);
if (loadingFinished) {
return true;
}
......@@ -280,26 +274,26 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) {
break;
}
int extractorTrack = extractorTrackIndices[track];
if (extractor.hasSamples(extractorTrack)) {
if (extractor.hasSamples(group)) {
return true;
}
}
return false;
}
/* package */ long readReset(int track) {
if (pendingResets[track]) {
pendingResets[track] = false;
/* package */ long readReset(int group) {
if (pendingResets[group]) {
pendingResets[group] = false;
return lastSeekPositionUs;
}
return TrackStream.NO_RESET;
}
/* package */ int readData(int track, MediaFormatHolder formatHolder, SampleHolder sampleHolder) {
/* package */ int readData(int group, MediaFormatHolder formatHolder,
SampleHolder sampleHolder) {
Assertions.checkState(prepared);
if (pendingResets[track] || isPendingReset()) {
if (pendingResets[group] || isPendingReset()) {
return TrackStream.NOTHING_READ;
}
......@@ -320,9 +314,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
extractor.configureSpliceTo(extractors.get(1));
}
int extractorTrack = extractorTrackIndices[track];
int extractorIndex = 0;
while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(extractorTrack)) {
while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(group)) {
// We're finished reading from the extractor for this particular track, so advance to the
// next one for the current read.
extractor = extractors.get(++extractorIndex);
......@@ -331,14 +324,14 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
}
}
MediaFormat mediaFormat = extractor.getMediaFormat(extractorTrack);
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[track])) {
MediaFormat mediaFormat = extractor.getMediaFormat(group);
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[group])) {
formatHolder.format = mediaFormat;
downstreamMediaFormats[track] = mediaFormat;
downstreamMediaFormats[group] = mediaFormat;
return TrackStream.FORMAT_READ;
}
if (extractor.getSample(extractorTrack, sampleHolder)) {
if (extractor.getSample(group, sampleHolder)) {
boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
return TrackStream.SAMPLE_READ;
......@@ -499,8 +492,6 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
trackType = PRIMARY_TYPE_VIDEO;
} else if (MimeTypes.isAudio(mimeType)) {
trackType = PRIMARY_TYPE_AUDIO;
} else if (MimeTypes.isText(mimeType)) {
trackType = PRIMARY_TYPE_TEXT;
} else {
trackType = PRIMARY_TYPE_NONE;
}
......@@ -516,54 +507,39 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
// Calculate the number of tracks that will be exposed.
int chunkSourceTrackCount = chunkSource.getTrackCount();
boolean expandPrimaryExtractorTrack = primaryExtractorTrackIndex != -1;
trackCount = extractorTrackCount;
if (expandPrimaryExtractorTrack) {
trackCount += chunkSourceTrackCount - 1;
}
// Instantiate the necessary internal data-structures.
trackFormats = new MediaFormat[trackCount];
trackEnabledStates = new boolean[trackCount];
pendingResets = new boolean[trackCount];
downstreamMediaFormats = new MediaFormat[trackCount];
chunkSourceTrackIndices = new int[trackCount];
extractorTrackIndices = new int[trackCount];
extractorTrackEnabledStates = new boolean[extractorTrackCount];
// Construct the set of exposed tracks.
long durationUs = chunkSource.getDurationUs();
int trackIndex = 0;
primaryTrackGroupIndex = -1;
trackGroups = new TrackGroup[extractorTrackCount];
groupEnabledStates = new boolean[extractorTrackCount];
pendingResets = new boolean[extractorTrackCount];
downstreamMediaFormats = new MediaFormat[extractorTrackCount];
// Construct the set of exposed track groups.
for (int i = 0; i < extractorTrackCount; i++) {
MediaFormat format = extractor.getMediaFormat(i).copyWithDurationUs(durationUs);
MediaFormat format = extractor.getMediaFormat(i);
if (i == primaryExtractorTrackIndex) {
MediaFormat[] formats = new MediaFormat[chunkSourceTrackCount];
for (int j = 0; j < chunkSourceTrackCount; j++) {
extractorTrackIndices[trackIndex] = i;
chunkSourceTrackIndices[trackIndex] = j;
Variant fixedTrackVariant = chunkSource.getFixedTrackVariant(j);
trackFormats[trackIndex++] = fixedTrackVariant == null ? format.copyAsAdaptive(null)
: copyWithFixedTrackInfo(format, fixedTrackVariant.format);
formats[j] = copyWithFixedTrackInfo(format, chunkSource.getTrackFormat(j));
}
trackGroups[i] = new TrackGroup(true, formats);
primaryTrackGroupIndex = i;
} else {
extractorTrackIndices[trackIndex] = i;
chunkSourceTrackIndices[trackIndex] = -1;
trackFormats[trackIndex++] = format;
trackGroups[i] = new TrackGroup(format);
}
}
}
/**
* Enables or disables the track at a given index.
* Enables or disables a specified track group.
*
* @param track The index of the track.
* @param enabledState True if the track is being enabled, or false if it's being disabled.
* @param group The index of the track group.
* @param enabledState True if the group is being enabled, or false if it's being disabled.
*/
private void setTrackEnabledState(int track, boolean enabledState) {
Assertions.checkState(trackEnabledStates[track] != enabledState);
int extractorTrack = extractorTrackIndices[track];
Assertions.checkState(extractorTrackEnabledStates[extractorTrack] != enabledState);
trackEnabledStates[track] = enabledState;
extractorTrackEnabledStates[extractorTrack] = enabledState;
private void setTrackGroupEnabledState(int group, boolean enabledState) {
Assertions.checkState(groupEnabledStates[group] != enabledState);
groupEnabledStates[group] = enabledState;
enabledTrackCount = enabledTrackCount + (enabledState ? 1 : -1);
}
......@@ -619,8 +595,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) {
return;
}
for (int i = 0; i < extractorTrackEnabledStates.length; i++) {
if (!extractorTrackEnabledStates[i]) {
for (int i = 0; i < groupEnabledStates.length; i++) {
if (!groupEnabledStates[i]) {
extractor.discardUntil(i, timeUs);
}
}
......@@ -630,8 +606,8 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
if (!extractor.isPrepared()) {
return false;
}
for (int i = 0; i < extractorTrackEnabledStates.length; i++) {
if (extractorTrackEnabledStates[i] && extractor.hasSamples(i)) {
for (int i = 0; i < groupEnabledStates.length; i++) {
if (groupEnabledStates[i] && extractor.hasSamples(i)) {
return true;
}
}
......@@ -819,15 +795,15 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
private final class TrackStreamImpl implements TrackStream {
private final int track;
private final int group;
public TrackStreamImpl(int track) {
this.track = track;
public TrackStreamImpl(int group) {
this.group = group;
}
@Override
public boolean isReady() {
return HlsSampleSource.this.isReady(track);
return HlsSampleSource.this.isReady(group);
}
@Override
......@@ -837,17 +813,17 @@ public final class HlsSampleSource implements SampleSource, Loader.Callback {
@Override
public long readReset() {
return HlsSampleSource.this.readReset(track);
return HlsSampleSource.this.readReset(group);
}
@Override
public int readData(MediaFormatHolder formatHolder, SampleHolder sampleHolder) {
return HlsSampleSource.this.readData(track, formatHolder, sampleHolder);
return HlsSampleSource.this.readData(group, formatHolder, sampleHolder);
}
@Override
public void disable() {
HlsSampleSource.this.disable(track);
HlsSampleSource.this.disable(group);
}
}
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
import java.io.IOException;
/**
 * Specifies a track selection from an {@link HlsMasterPlaylist}.
 */
public interface HlsTrackSelector {

  /**
   * Defines a selector output. Receives the tracks chosen by
   * {@link HlsTrackSelector#selectTracks}.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified variants of the master playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variants The variants to use for the adaptive track.
     */
    void adaptiveTrack(HlsMasterPlaylist playlist, Variant[] variants);

    /**
     * Outputs a fixed (non-adaptive) track corresponding to the specified variant of the master
     * playlist.
     *
     * @param playlist The master playlist being processed.
     * @param variant The variant to use for the track.
     */
    void fixedTrack(HlsMasterPlaylist playlist, Variant variant);

  }

  /**
   * Outputs a track selection for the given master playlist.
   *
   * @param playlist The master playlist to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the playlist.
   */
  void selectTracks(HlsMasterPlaylist playlist, Output output) throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
import java.util.Arrays;
/**
 * A default {@link SmoothStreamingTrackSelector} implementation.
 */
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultSmoothStreamingTrackSelector implements SmoothStreamingTrackSelector {

  private final int streamElementType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video stream elements.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   */
  public static DefaultSmoothStreamingTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /** Creates a selector for audio stream elements. No device capability filtering is applied. */
  public static DefaultSmoothStreamingTrackSelector newAudioInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_AUDIO, null, false, false);
  }

  /** Creates a selector for text stream elements. No device capability filtering is applied. */
  public static DefaultSmoothStreamingTrackSelector newTextInstance() {
    return new DefaultSmoothStreamingTrackSelector(StreamElement.TYPE_TEXT, null, false, false);
  }

  private DefaultSmoothStreamingTrackSelector(int streamElementType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.streamElementType = streamElementType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
    // Walk every stream element, emitting tracks only for elements of the configured type.
    for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
      if (manifest.streamElements[elementIndex].type != streamElementType) {
        continue;
      }
      if (streamElementType == StreamElement.TYPE_VIDEO) {
        outputVideoTracks(manifest, elementIndex, output);
      } else {
        // Audio and text elements are exposed as one fixed track per manifest track.
        TrackElement[] trackElements = manifest.streamElements[elementIndex].tracks;
        for (int trackIndex = 0; trackIndex < trackElements.length; trackIndex++) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
      }
    }
  }

  /**
   * Outputs the tracks for a single video stream element: an adaptive track spanning the selected
   * formats (only when more than one format is selected), plus one fixed track per selected format.
   */
  private void outputVideoTracks(SmoothStreamingManifest manifest, int elementIndex, Output output)
      throws IOException {
    TrackElement[] trackElements = manifest.streamElements[elementIndex].tracks;
    int[] selectedIndices;
    if (filterVideoRepresentations) {
      // Restrict the selection to formats the default display is capable of rendering.
      selectedIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, Arrays.asList(trackElements), null,
          filterProtectedHdContent && manifest.protectionElement != null);
    } else {
      selectedIndices = Util.firstIntegersArray(trackElements.length);
    }
    if (selectedIndices.length > 1) {
      output.adaptiveTrack(manifest, elementIndex, selectedIndices);
    }
    for (int i = 0; i < selectedIndices.length; i++) {
      output.fixedTrack(manifest, elementIndex, selectedIndices[i]);
    }
  }

}
......@@ -18,6 +18,7 @@ package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackGroup;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
......@@ -49,7 +50,6 @@ import android.util.Base64;
import android.util.SparseArray;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
......@@ -57,13 +57,13 @@ import java.util.List;
/**
* An {@link ChunkSource} for SmoothStreaming.
*/
public class SmoothStreamingChunkSource implements ChunkSource,
SmoothStreamingTrackSelector.Output {
// TODO[REFACTOR]: Handle multiple stream elements of the same type (at a higher level).
public class SmoothStreamingChunkSource implements ChunkSource {
private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000;
private static final int INITIALIZATION_VECTOR_SIZE = 8;
private final SmoothStreamingTrackSelector trackSelector;
private final int streamElementType;
private final DataSource dataSource;
private final Evaluation evaluation;
private final long liveEdgeLatencyUs;
......@@ -73,20 +73,26 @@ public class SmoothStreamingChunkSource implements ChunkSource,
private final FormatEvaluator adaptiveFormatEvaluator;
private final boolean live;
// The tracks exposed by this source.
private final ArrayList<ExposedTrack> tracks;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats;
private boolean prepareCalled;
private SmoothStreamingManifest currentManifest;
private int currentManifestChunkOffset;
private boolean needManifestRefresh;
private ExposedTrack enabledTrack;
private IOException fatalError;
// Properties of exposed tracks.
private int elementIndex;
private TrackGroup trackGroup;
private Format[] trackFormats;
// Properties of enabled tracks.
private Format[] enabledFormats;
private int adaptiveMaxWidth;
private int adaptiveMaxHeight;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats;
/**
* Constructor to use for live streaming.
* <p>
......@@ -95,7 +101,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param trackSelector Selects tracks from the manifest to be exposed by this source.
* @param streamElementType The type of stream element exposed by this source. One of
* {@link StreamElement#TYPE_VIDEO}, {@link StreamElement#TYPE_AUDIO} and
* {@link StreamElement#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
......@@ -105,9 +113,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource,
int streamElementType, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), streamElementType, dataSource,
adaptiveFormatEvaluator, liveEdgeLatencyMs);
}
......@@ -115,27 +123,27 @@ public class SmoothStreamingChunkSource implements ChunkSource,
* Constructor to use for fixed duration content.
*
* @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
* @param trackSelector Selects tracks from the manifest to be exposed by this source.
* @param streamElementType The type of stream element exposed by this source. One of
* {@link StreamElement#TYPE_VIDEO}, {@link StreamElement#TYPE_AUDIO} and
* {@link StreamElement#TYPE_TEXT}.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
*/
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator) {
this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, 0);
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementType,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this(null, manifest, streamElementType, dataSource, adaptiveFormatEvaluator, 0);
}
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingManifest initialManifest, SmoothStreamingTrackSelector trackSelector,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
SmoothStreamingManifest initialManifest, int streamElementType, DataSource dataSource,
FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest;
this.trackSelector = trackSelector;
this.streamElementType = streamElementType;
this.dataSource = dataSource;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
evaluation = new Evaluation();
tracks = new ArrayList<>();
extractorWrappers = new SparseArray<>();
mediaFormats = new SparseArray<>();
live = initialManifest.isLive;
......@@ -168,31 +176,35 @@ public class SmoothStreamingChunkSource implements ChunkSource,
@Override
public boolean prepare() {
if (!prepareCalled) {
selectTracks(currentManifest);
prepareCalled = true;
try {
trackSelector.selectTracks(currentManifest, this);
} catch (IOException e) {
fatalError = e;
}
}
return fatalError == null;
}
@Override
public int getTrackCount() {
return tracks.size();
return true;
}
@Override
public final MediaFormat getFormat(int track) {
return tracks.get(track).trackFormat;
public final TrackGroup getTracks() {
return trackGroup;
}
@Override
public void enable(int track) {
enabledTrack = tracks.get(track);
if (enabledTrack.isAdaptive()) {
public void enable(int[] tracks) {
int maxWidth = -1;
int maxHeight = -1;
enabledFormats = new Format[tracks.length];
for (int i = 0; i < tracks.length; i++) {
enabledFormats[i] = trackFormats[tracks[i]];
maxWidth = Math.max(enabledFormats[i].width, maxWidth);
maxHeight = Math.max(enabledFormats[i].height, maxHeight);
}
Arrays.sort(enabledFormats, new DecreasingBandwidthComparator());
if (enabledFormats.length > 1) {
adaptiveMaxWidth = maxWidth;
adaptiveMaxHeight = maxHeight;
adaptiveFormatEvaluator.enable();
} else {
adaptiveMaxWidth = -1;
adaptiveMaxHeight = -1;
}
if (manifestFetcher != null) {
manifestFetcher.enable();
......@@ -207,9 +219,9 @@ public class SmoothStreamingChunkSource implements ChunkSource,
SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = currentManifest.streamElements[enabledTrack.elementIndex];
StreamElement currentElement = currentManifest.streamElements[elementIndex];
int currentElementChunkCount = currentElement.chunkCount;
StreamElement newElement = newManifest.streamElements[enabledTrack.elementIndex];
StreamElement newElement = newManifest.streamElements[elementIndex];
if (currentElementChunkCount == 0 || newElement.chunkCount == 0) {
// There's no overlap between the old and new elements because at least one is empty.
currentManifestChunkOffset += currentElementChunkCount;
......@@ -244,11 +256,10 @@ public class SmoothStreamingChunkSource implements ChunkSource,
}
evaluation.queueSize = queue.size();
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats,
evaluation);
if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledFormats, evaluation);
} else {
evaluation.format = enabledTrack.fixedFormat;
evaluation.format = enabledFormats[0];
evaluation.trigger = Chunk.TRIGGER_MANUAL;
}
......@@ -268,7 +279,7 @@ public class SmoothStreamingChunkSource implements ChunkSource,
// In all cases where we return before instantiating a new chunk, we want out.chunk to be null.
out.chunk = null;
StreamElement streamElement = currentManifest.streamElements[enabledTrack.elementIndex];
StreamElement streamElement = currentManifest.streamElements[elementIndex];
if (streamElement.chunkCount == 0) {
if (currentManifest.isLive) {
needManifestRefresh = true;
......@@ -315,12 +326,12 @@ public class SmoothStreamingChunkSource implements ChunkSource,
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
int manifestTrackIndex = getManifestTrackIndex(streamElement, selectedFormat);
int manifestTrackKey = getManifestTrackKey(enabledTrack.elementIndex, manifestTrackIndex);
int manifestTrackKey = getManifestTrackKey(elementIndex, manifestTrackIndex);
Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractorWrappers.get(manifestTrackKey), drmInitData, dataSource, currentAbsoluteChunkIndex,
chunkStartTimeUs, chunkEndTimeUs, evaluation.trigger, mediaFormats.get(manifestTrackKey),
enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight);
adaptiveMaxWidth, adaptiveMaxHeight);
out.chunk = mediaChunk;
}
......@@ -336,7 +347,7 @@ public class SmoothStreamingChunkSource implements ChunkSource,
@Override
public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) {
if (enabledFormats.length > 1) {
adaptiveFormatEvaluator.disable();
}
if (manifestFetcher != null) {
......@@ -346,43 +357,28 @@ public class SmoothStreamingChunkSource implements ChunkSource,
fatalError = null;
}
// SmoothStreamingTrackSelector.Output implementation.
// Private methods.
@Override
public void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] trackIndices) {
if (adaptiveFormatEvaluator == null) {
// Do nothing.
return;
}
MediaFormat maxHeightMediaFormat = null;
StreamElement streamElement = manifest.streamElements[element];
int maxWidth = -1;
int maxHeight = -1;
Format[] formats = new Format[trackIndices.length];
for (int i = 0; i < formats.length; i++) {
int manifestTrackIndex = trackIndices[i];
formats[i] = streamElement.tracks[manifestTrackIndex].format;
MediaFormat mediaFormat = initManifestTrack(manifest, element, manifestTrackIndex);
if (maxHeightMediaFormat == null || mediaFormat.height > maxHeight) {
maxHeightMediaFormat = mediaFormat;
private void selectTracks(SmoothStreamingManifest manifest) {
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == streamElementType) {
// We've found an element of the desired type.
elementIndex = i;
TrackElement[] trackElements = manifest.streamElements[i].tracks;
trackFormats = new Format[trackElements.length];
MediaFormat[] trackMediaFormats = new MediaFormat[trackElements.length];
for (int j = 0; j < trackMediaFormats.length; j++) {
trackFormats[j] = trackElements[j].format;
trackMediaFormats[j] = initManifestTrack(manifest, i, j);
}
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null, trackMediaFormats);
return;
}
maxWidth = Math.max(maxWidth, mediaFormat.width);
maxHeight = Math.max(maxHeight, mediaFormat.height);
}
Arrays.sort(formats, new DecreasingBandwidthComparator());
MediaFormat adaptiveMediaFormat = maxHeightMediaFormat.copyAsAdaptive(null);
tracks.add(new ExposedTrack(adaptiveMediaFormat, element, formats, maxWidth, maxHeight));
trackGroup = new TrackGroup(adaptiveFormatEvaluator != null);
trackFormats = new Format[0];
}
@Override
public void fixedTrack(SmoothStreamingManifest manifest, int element, int trackIndex) {
MediaFormat mediaFormat = initManifestTrack(manifest, element, trackIndex);
Format format = manifest.streamElements[element].tracks[trackIndex].format;
tracks.add(new ExposedTrack(mediaFormat, element, format));
}
// Private methods.
private MediaFormat initManifestTrack(SmoothStreamingManifest manifest, int elementIndex,
int trackIndex) {
int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex);
......@@ -515,45 +511,4 @@ public class SmoothStreamingChunkSource implements ChunkSource,
data[secondPosition] = temp;
}
// Private classes.
private static final class ExposedTrack {

  public final MediaFormat trackFormat;

  private final int elementIndex;
  // Exactly one of fixedFormat and adaptiveFormats is non-null, depending on the track kind.
  private final Format fixedFormat;
  private final Format[] adaptiveFormats;
  private final int adaptiveMaxWidth;
  private final int adaptiveMaxHeight;

  /** Creates a fixed (non-adaptive) track backed by a single format. */
  public ExposedTrack(MediaFormat trackFormat, int elementIndex, Format fixedFormat) {
    this(trackFormat, elementIndex, fixedFormat, null, MediaFormat.NO_VALUE,
        MediaFormat.NO_VALUE);
  }

  /** Creates an adaptive track that switches between the given formats. */
  public ExposedTrack(MediaFormat trackFormat, int elementIndex, Format[] adaptiveFormats,
      int adaptiveMaxWidth, int adaptiveMaxHeight) {
    this(trackFormat, elementIndex, null, adaptiveFormats, adaptiveMaxWidth, adaptiveMaxHeight);
  }

  private ExposedTrack(MediaFormat trackFormat, int elementIndex, Format fixedFormat,
      Format[] adaptiveFormats, int adaptiveMaxWidth, int adaptiveMaxHeight) {
    this.trackFormat = trackFormat;
    this.elementIndex = elementIndex;
    this.fixedFormat = fixedFormat;
    this.adaptiveFormats = adaptiveFormats;
    this.adaptiveMaxWidth = adaptiveMaxWidth;
    this.adaptiveMaxHeight = adaptiveMaxHeight;
  }

  /** Whether this is an adaptive track (i.e. it was built from multiple formats). */
  public boolean isAdaptive() {
    return adaptiveFormats != null;
  }

}
}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
 * Specifies a track selection from a {@link SmoothStreamingManifest}.
 * <p>
 * Implementations inspect a manifest and report each selected track to an {@link Output}.
 */
public interface SmoothStreamingTrackSelector {

  /**
   * Defines a selector output, to which selected tracks are reported.
   * <p>
   * A selector may report any mixture of adaptive and fixed tracks, one callback per track.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified tracks in the specified element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the adaptive tracks are located.
     * @param tracks The indices of the tracks within the element.
     */
    void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);

    /**
     * Outputs a fixed track corresponding to the specified track in the specified element.
     *
     * @param manifest The manifest being processed.
     * @param element The index of the element within which the track is located.
     * @param track The index of the track within the element.
     */
    void fixedTrack(SmoothStreamingManifest manifest, int element, int track);

  }

  /**
   * Outputs a track selection for a given manifest.
   * <p>
   * The selector invokes {@code output} once for each track it selects, via
   * {@link Output#adaptiveTrack} or {@link Output#fixedTrack}.
   *
   * @param manifest The manifest to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the manifest.
   */
  void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;

}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment