Commit ecd48da1 by Oliver Woodman

Multi-track support for DASH.

- With this change, you can select from the individual video formats in
the demo app, as well as the regular "auto" (adaptive) track.
- DashRendererBuilder no longer needs to create MultiTrackChunkSource
instances for the multiple tracks to be exposed.
parent 6cf261ae
...@@ -18,24 +18,18 @@ package com.google.android.exoplayer.demo.player; ...@@ -18,24 +18,18 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl; import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl; import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer; import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer; import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer; import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities; import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.ChunkSampleSource; import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.DashChunkSource; import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.DefaultDashTrackSelector;
import com.google.android.exoplayer.dash.mpd.AdaptationSet; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.Period; import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.dash.mpd.UtcTimingElement; import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver; import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver;
import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver.UtcTimingCallback; import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver.UtcTimingCallback;
...@@ -44,8 +38,6 @@ import com.google.android.exoplayer.drm.MediaDrmCallback; ...@@ -44,8 +38,6 @@ import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager; import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.drm.UnsupportedDrmException; import com.google.android.exoplayer.drm.UnsupportedDrmException;
import com.google.android.exoplayer.text.TextTrackRenderer; import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.text.webvtt.WebvttParser;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultAllocator; import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter; import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
...@@ -60,8 +52,6 @@ import android.os.Handler; ...@@ -60,8 +52,6 @@ import android.os.Handler;
import android.util.Log; import android.util.Log;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/** /**
* A {@link RendererBuilder} for DASH. * A {@link RendererBuilder} for DASH.
...@@ -194,23 +184,11 @@ public class DashRendererBuilder implements RendererBuilder { ...@@ -194,23 +184,11 @@ public class DashRendererBuilder implements RendererBuilder {
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player); DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
boolean hasContentProtection = false; boolean hasContentProtection = false;
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO); for (int i = 0; i < period.adaptationSets.size(); i++) {
int audioAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_AUDIO); AdaptationSet adaptationSet = period.adaptationSets.get(i);
AdaptationSet videoAdaptationSet = null; if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
AdaptationSet audioAdaptationSet = null; hasContentProtection |= adaptationSet.hasContentProtection();
if (videoAdaptationSetIndex != -1) { }
videoAdaptationSet = period.adaptationSets.get(videoAdaptationSetIndex);
hasContentProtection |= videoAdaptationSet.hasContentProtection();
}
if (audioAdaptationSetIndex != -1) {
audioAdaptationSet = period.adaptationSets.get(audioAdaptationSetIndex);
hasContentProtection |= audioAdaptationSet.hasContentProtection();
}
// Fail if we have neither video or audio.
if (videoAdaptationSet == null && audioAdaptationSet == null) {
player.onRenderersError(new IllegalStateException("No video or audio adaptation sets"));
return;
} }
// Check drm support if necessary. // Check drm support if necessary.
...@@ -225,129 +203,49 @@ public class DashRendererBuilder implements RendererBuilder { ...@@ -225,129 +203,49 @@ public class DashRendererBuilder implements RendererBuilder {
try { try {
drmSessionManager = StreamingDrmSessionManager.newWidevineInstance( drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player); player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
filterHdContent = videoAdaptationSet != null && videoAdaptationSet.hasContentProtection() filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
&& getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
} catch (UnsupportedDrmException e) { } catch (UnsupportedDrmException e) {
player.onRenderersError(e); player.onRenderersError(e);
return; return;
} }
} }
// Determine which video representations we should use for playback.
int[] videoRepresentationIndices = null;
if (videoAdaptationSet != null) {
try {
videoRepresentationIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, videoAdaptationSet.representations, null, filterHdContent);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
// Build the video renderer. // Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer; DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
if (videoRepresentationIndices == null || videoRepresentationIndices.length == 0) { ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
videoRenderer = null; DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
} else { videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); elapsedRealtimeOffset, mainHandler, player);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, DemoPlayer.TYPE_VIDEO);
mainHandler, player); TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, mainHandler, player, 50);
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
}
// Build the audio chunk sources.
List<ChunkSource> audioChunkSourceList = new ArrayList<>();
List<String> audioTrackNameList = new ArrayList<>();
if (audioAdaptationSet != null) {
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
List<Representation> audioRepresentations = audioAdaptationSet.representations;
List<String> codecs = new ArrayList<>();
for (int i = 0; i < audioRepresentations.size(); i++) {
Format format = audioRepresentations.get(i).format;
audioTrackNameList.add(format.id + " (" + format.audioChannels + "ch, " +
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player));
codecs.add(format.codecs);
}
}
// Build the audio renderer. // Build the audio renderer.
final String[] audioTrackNames; DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
final MultiTrackChunkSource audioChunkSource; ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
final TrackRenderer audioRenderer; DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
if (audioChunkSourceList.isEmpty()) { elapsedRealtimeOffset, mainHandler, player);
audioTrackNames = null; ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
audioChunkSource = null; AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
audioRenderer = null; DemoPlayer.TYPE_AUDIO);
} else { TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
audioTrackNames = new String[audioTrackNameList.size()]; drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
audioTrackNameList.toArray(audioTrackNames);
audioChunkSource = new MultiTrackChunkSource(audioChunkSourceList); // Build the text renderer.
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player, AudioCapabilities.getCapabilities(context));
}
// Build the text chunk sources.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator textEvaluator = new FormatEvaluator.FixedEvaluator(); ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
List<ChunkSource> textChunkSourceList = new ArrayList<>(); DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
List<String> textTrackNameList = new ArrayList<>(); elapsedRealtimeOffset, mainHandler, player);
for (int i = 0; i < period.adaptationSets.size(); i++) { ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
AdaptationSet adaptationSet = period.adaptationSets.get(i); TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
if (adaptationSet.type == AdaptationSet.TYPE_TEXT) { DemoPlayer.TYPE_TEXT);
List<Representation> representations = adaptationSet.representations; TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
for (int j = 0; j < representations.size(); j++) { mainHandler.getLooper());
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player));
}
}
}
// Build the text renderers
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textChunkSourceList.isEmpty()) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textTrackNameList.size()];
textTrackNameList.toArray(textTrackNames);
textChunkSource = new MultiTrackChunkSource(textChunkSourceList);
SampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(textSampleSource, player, mainHandler.getLooper(),
new TtmlParser(), new WebvttParser());
}
// Invoke the callback. // Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT]; TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
......
...@@ -342,7 +342,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi ...@@ -342,7 +342,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
player.release(); player.release();
} }
public int getPlaybackState() { public int getPlaybackState() {
if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILDING) { if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILDING) {
return STATE_PREPARING; return STATE_PREPARING;
......
...@@ -22,7 +22,6 @@ import com.google.android.exoplayer.SampleSource; ...@@ -22,7 +22,6 @@ import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer; import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkSampleSource; import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource; import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator; import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource; import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.mpd.AdaptationSet; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
...@@ -109,7 +108,7 @@ public class DashRendererBuilder implements ManifestCallback<MediaPresentationDe ...@@ -109,7 +108,7 @@ public class DashRendererBuilder implements ManifestCallback<MediaPresentationDe
if (mimeType.equals(MimeTypes.VIDEO_WEBM)) { if (mimeType.equals(MimeTypes.VIDEO_WEBM)) {
videoChunkSource = new DashChunkSource(videoDataSource, videoChunkSource = new DashChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), manifest.getPeriodDuration(0), new AdaptiveEvaluator(bandwidthMeter), manifest.getPeriodDuration(0),
videoRepresentations); AdaptationSet.TYPE_VIDEO, videoRepresentations);
} else { } else {
throw new IllegalStateException("Unexpected mime type: " + mimeType); throw new IllegalStateException("Unexpected mime type: " + mimeType);
} }
...@@ -125,9 +124,8 @@ public class DashRendererBuilder implements ManifestCallback<MediaPresentationDe ...@@ -125,9 +124,8 @@ public class DashRendererBuilder implements ManifestCallback<MediaPresentationDe
audioRenderer = null; audioRenderer = null;
} else { } else {
DataSource audioDataSource = new DefaultUriDataSource(player, bandwidthMeter, userAgent); DataSource audioDataSource = new DefaultUriDataSource(player, bandwidthMeter, userAgent);
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator(); DashChunkSource audioChunkSource = new DashChunkSource(audioDataSource, null,
DashChunkSource audioChunkSource = new DashChunkSource(audioDataSource, audioEvaluator, manifest.getPeriodDuration(0), AdaptationSet.TYPE_AUDIO, audioRepresentation);
manifest.getPeriodDuration(0), audioRepresentation);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE); AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE);
if ("opus".equals(audioRepresentation.format.codecs)) { if ("opus".equals(audioRepresentation.format.codecs)) {
......
...@@ -21,8 +21,6 @@ import static org.mockito.Mockito.when; ...@@ -21,8 +21,6 @@ import static org.mockito.Mockito.when;
import com.google.android.exoplayer.TimeRange; import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.chunk.ChunkOperationHolder; import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.Format; import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.FixedEvaluator;
import com.google.android.exoplayer.chunk.InitializationChunk; import com.google.android.exoplayer.chunk.InitializationChunk;
import com.google.android.exoplayer.chunk.MediaChunk; import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.dash.mpd.AdaptationSet; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
...@@ -53,8 +51,6 @@ import java.util.List; ...@@ -53,8 +51,6 @@ import java.util.List;
*/ */
public class DashChunkSourceTest extends InstrumentationTestCase { public class DashChunkSourceTest extends InstrumentationTestCase {
private static final FormatEvaluator EVALUATOR = new FixedEvaluator();
private static final long VOD_DURATION_MS = 30000; private static final long VOD_DURATION_MS = 30000;
private static final long LIVE_SEGMENT_COUNT = 5; private static final long LIVE_SEGMENT_COUNT = 5;
...@@ -85,8 +81,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -85,8 +81,9 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testGetAvailableRangeOnVod() { public void testGetAvailableRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
null, null, mock(FormatEvaluator.class)); DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(0);
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
...@@ -106,7 +103,8 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -106,7 +103,8 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
public void testGetAvailableRangeOnMultiPeriodVod() { public void testGetAvailableRangeOnMultiPeriodVod() {
DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(), DashChunkSource chunkSource = new DashChunkSource(buildMultiPeriodVodMpd(),
AdaptationSet.TYPE_VIDEO, null, null, EVALUATOR); DefaultDashTrackSelector.newVideoInstance(null, false, false), null, null);
chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(0);
TimeRange availableRange = chunkSource.getAvailableRange(); TimeRange availableRange = chunkSource.getAvailableRange();
checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000); checkAvailableRange(availableRange, 0, MULTI_PERIOD_VOD_DURATION_MS * 1000);
...@@ -120,8 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -120,8 +118,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
} }
public void testSegmentIndexInitializationOnVod() { public void testSegmentIndexInitializationOnVod() {
DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(), AdaptationSet.TYPE_VIDEO, DashChunkSource chunkSource = new DashChunkSource(buildVodMpd(),
null, mock(DataSource.class), EVALUATOR); DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class),
null);
chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(0);
List<MediaChunk> queue = new ArrayList<>(); List<MediaChunk> queue = new ArrayList<>();
...@@ -232,7 +232,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -232,7 +232,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
private static MediaPresentationDescription buildMpd(long durationMs, private static MediaPresentationDescription buildMpd(long durationMs,
List<Representation> representations, boolean live, boolean limitTimeshiftBuffer) { List<Representation> representations, boolean live, boolean limitTimeshiftBuffer) {
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations); AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO, representations);
Period period = new Period(null, 0, Collections.singletonList(adaptationSet)); Period period = new Period(null, 0, Collections.singletonList(adaptationSet));
return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, durationMs, -1, live, -1, return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, durationMs, -1, live, -1,
(limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1, null, null, (limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1, null, null,
...@@ -259,7 +259,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -259,7 +259,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
long periodDurationMs = VOD_DURATION_MS; long periodDurationMs = VOD_DURATION_MS;
for (int i = 0; i < 2; i++) { for (int i = 0; i < 2; i++) {
Representation representation = buildVodRepresentation(REGULAR_VIDEO); Representation representation = buildVodRepresentation(REGULAR_VIDEO);
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
Collections.singletonList(representation)); Collections.singletonList(representation));
Period period = new Period(null, timeMs, Collections.singletonList(adaptationSet)); Period period = new Period(null, timeMs, Collections.singletonList(adaptationSet));
periods.add(period); periods.add(period);
...@@ -288,7 +288,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -288,7 +288,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
long periodDurationMs = LIVE_DURATION_MS; long periodDurationMs = LIVE_DURATION_MS;
for (int i = 0; i < MULTI_PERIOD_COUNT; i++) { for (int i = 0; i < MULTI_PERIOD_COUNT; i++) {
Representation representation = buildSegmentTimelineRepresentation(LIVE_DURATION_MS, 0); Representation representation = buildSegmentTimelineRepresentation(LIVE_DURATION_MS, 0);
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
Collections.singletonList(representation)); Collections.singletonList(representation));
Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet)); Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet));
periods.add(period); periods.add(period);
...@@ -303,7 +303,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -303,7 +303,7 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
long periodDurationMs = LIVE_DURATION_MS; long periodDurationMs = LIVE_DURATION_MS;
for (int i = 0; i < MULTI_PERIOD_COUNT; i++) { for (int i = 0; i < MULTI_PERIOD_COUNT; i++) {
Representation representation = buildSegmentTemplateRepresentation(); Representation representation = buildSegmentTemplateRepresentation();
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
Collections.singletonList(representation)); Collections.singletonList(representation));
Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet)); Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet));
periods.add(period); periods.add(period);
...@@ -322,9 +322,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase { ...@@ -322,9 +322,10 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class); ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
when(manifestFetcher.getManifest()).thenReturn(mpd); when(manifestFetcher.getManifest()).thenReturn(mpd);
DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd, DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
AdaptationSet.TYPE_VIDEO, null, mock(DataSource.class), EVALUATOR, DefaultDashTrackSelector.newVideoInstance(null, false, false), mock(DataSource.class), null,
new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS), new FakeClock(mpd.availabilityStartTime + mpd.duration - ELAPSED_REALTIME_OFFSET_MS),
liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null); liveEdgeLatencyMs * 1000, ELAPSED_REALTIME_OFFSET_MS * 1000, startAtLiveEdge, null, null);
chunkSource.prepare();
chunkSource.enable(0); chunkSource.enable(0);
return chunkSource; return chunkSource;
} }
......
...@@ -18,6 +18,8 @@ package com.google.android.exoplayer.chunk; ...@@ -18,6 +18,8 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.ExoPlaybackException; import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer.ExoPlayerComponent; import com.google.android.exoplayer.ExoPlayer.ExoPlayerComponent;
import com.google.android.exoplayer.MediaFormat; import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.util.Assertions; import com.google.android.exoplayer.util.Assertions;
import java.io.IOException; import java.io.IOException;
...@@ -26,9 +28,12 @@ import java.util.List; ...@@ -26,9 +28,12 @@ import java.util.List;
/** /**
* A {@link ChunkSource} providing the ability to switch between multiple other {@link ChunkSource} * A {@link ChunkSource} providing the ability to switch between multiple other {@link ChunkSource}
* instances. * instances.
*
* @deprecated {@link DashChunkSource} and {@link SmoothStreamingChunkSource} both support multiple
* tracks directly, so use of this class should not be required. It will be deleted once legacy
* uses have been removed.
*/ */
// TODO: Expose multiple tracks directly in DashChunkSource and SmoothStreamingChunkSource, and @Deprecated
// delete this class.
public final class MultiTrackChunkSource implements ChunkSource, ExoPlayerComponent { public final class MultiTrackChunkSource implements ChunkSource, ExoPlayerComponent {
/** /**
......
...@@ -33,6 +33,7 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation; ...@@ -33,6 +33,7 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.InitializationChunk; import com.google.android.exoplayer.chunk.InitializationChunk;
import com.google.android.exoplayer.chunk.MediaChunk; import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.SingleSampleMediaChunk; import com.google.android.exoplayer.chunk.SingleSampleMediaChunk;
import com.google.android.exoplayer.dash.DashTrackSelector.Output;
import com.google.android.exoplayer.dash.mpd.AdaptationSet; import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.ContentProtection; import com.google.android.exoplayer.dash.mpd.ContentProtection;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription; import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
...@@ -41,7 +42,6 @@ import com.google.android.exoplayer.dash.mpd.RangedUri; ...@@ -41,7 +42,6 @@ import com.google.android.exoplayer.dash.mpd.RangedUri;
import com.google.android.exoplayer.dash.mpd.Representation; import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.drm.DrmInitData; import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.extractor.ChunkIndex; import com.google.android.exoplayer.extractor.ChunkIndex;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.mp4.FragmentedMp4Extractor; import com.google.android.exoplayer.extractor.mp4.FragmentedMp4Extractor;
import com.google.android.exoplayer.extractor.webm.WebmExtractor; import com.google.android.exoplayer.extractor.webm.WebmExtractor;
import com.google.android.exoplayer.upstream.DataSource; import com.google.android.exoplayer.upstream.DataSource;
...@@ -55,6 +55,7 @@ import android.os.Handler; ...@@ -55,6 +55,7 @@ import android.os.Handler;
import android.util.SparseArray; import android.util.SparseArray;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
...@@ -73,7 +74,7 @@ import java.util.List; ...@@ -73,7 +74,7 @@ import java.util.List;
* </ol> * </ol>
*/ */
// TODO: handle cases where the above assumption are false // TODO: handle cases where the above assumption are false
public class DashChunkSource implements ChunkSource { public class DashChunkSource implements ChunkSource, Output {
/** /**
* Interface definition for a callback to be notified of {@link DashChunkSource} events. * Interface definition for a callback to be notified of {@link DashChunkSource} events.
...@@ -100,38 +101,26 @@ public class DashChunkSource implements ChunkSource { ...@@ -100,38 +101,26 @@ public class DashChunkSource implements ChunkSource {
} }
/**
* Specifies that we should process all tracks.
*/
public static final int USE_ALL_TRACKS = -1;
private final Handler eventHandler; private final Handler eventHandler;
private final EventListener eventListener; private final EventListener eventListener;
private final MediaFormat trackFormat;
private final DataSource dataSource; private final DataSource dataSource;
private final FormatEvaluator formatEvaluator; private final FormatEvaluator adaptiveFormatEvaluator;
private final Evaluation evaluation; private final Evaluation evaluation;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final DashTrackSelector trackSelector;
private final ArrayList<ExposedTrack> tracks;
private final SparseArray<PeriodHolder> periodHolders;
private final Clock systemClock; private final Clock systemClock;
private final long liveEdgeLatencyUs; private final long liveEdgeLatencyUs;
private final long elapsedRealtimeOffsetUs; private final long elapsedRealtimeOffsetUs;
private final int maxWidth;
private final int maxHeight;
private final long[] availableRangeValues; private final long[] availableRangeValues;
private final SparseArray<PeriodHolder> periodHolders;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final int adaptationSetIndex;
private final int[] representationIndices;
private MediaPresentationDescription currentManifest; private MediaPresentationDescription currentManifest;
private ExposedTrack enabledTrack;
private int periodHolderNextIndex; private int nextPeriodHolderIndex;
private DrmInitData drmInitData;
private TimeRange availableRange; private TimeRange availableRange;
private boolean prepareCalled;
private boolean startAtLiveEdge; private boolean startAtLiveEdge;
private boolean lastChunkWasInitialization; private boolean lastChunkWasInitialization;
private IOException fatalError; private IOException fatalError;
...@@ -140,44 +129,49 @@ public class DashChunkSource implements ChunkSource { ...@@ -140,44 +129,49 @@ public class DashChunkSource implements ChunkSource {
* Lightweight constructor to use for fixed duration content. * Lightweight constructor to use for fixed duration content.
* *
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content. * @param durationMs The duration of the content.
* @param adaptationSetType The type of the adaptation set to which the representations belong.
* One of {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source. * @param representations The representations to be considered by the source.
*/ */
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator, public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long durationMs, Representation... representations) { long durationMs, int adaptationSetType, Representation... representations) {
this(buildManifest(durationMs, Arrays.asList(representations)), 0, null, dataSource, this(buildManifest(durationMs, adaptationSetType, Arrays.asList(representations)), null,
formatEvaluator); dataSource, adaptiveFormatEvaluator);
} }
/** /**
* Lightweight constructor to use for fixed duration content. * Lightweight constructor to use for fixed duration content.
* *
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param durationMs The duration of the content. * @param durationMs The duration of the content.
* @param adaptationSetType The type of the adaptation set to which the representations belong.
* One of {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
* {@link AdaptationSet#TYPE_TEXT}.
* @param representations The representations to be considered by the source. * @param representations The representations to be considered by the source.
*/ */
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator, public DashChunkSource(DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
long durationMs, List<Representation> representations) { long durationMs, int adaptationSetType, List<Representation> representations) {
this(buildManifest(durationMs, representations), 0, null, dataSource, formatEvaluator); this(buildManifest(durationMs, adaptationSetType, representations),
DefaultDashTrackSelector.newVideoInstance(null, false, false), dataSource,
adaptiveFormatEvaluator);
} }
/** /**
* Constructor to use for fixed duration content. * Constructor to use for fixed duration content.
* *
* @param manifest The manifest. * @param manifest The manifest.
* @param adaptationSetIndex The index of the adaptation set that should be used. * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
*/ */
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex, public DashChunkSource(MediaPresentationDescription manifest, DashTrackSelector trackSelector,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) { DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator, this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, new SystemClock(), 0,
new SystemClock(), 0, 0, false, null, null); 0, false, null, null);
} }
/** /**
...@@ -188,12 +182,9 @@ public class DashChunkSource implements ChunkSource { ...@@ -188,12 +182,9 @@ public class DashChunkSource implements ChunkSource {
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param adaptationSetIndex The index of the adaptation set that should be used. * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the * lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however * manifest). Choosing a small value will minimize latency introduced by the player, however
...@@ -207,11 +198,11 @@ public class DashChunkSource implements ChunkSource { ...@@ -207,11 +198,11 @@ public class DashChunkSource implements ChunkSource {
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
*/ */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetIndex, int[] representationIndices, DataSource dataSource, DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
Handler eventHandler, EventListener eventListener) { Handler eventHandler, EventListener eventListener) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices, this(manifestFetcher, manifestFetcher.getManifest(), trackSelector,
dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000, dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener); elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener);
} }
...@@ -220,12 +211,9 @@ public class DashChunkSource implements ChunkSource { ...@@ -220,12 +211,9 @@ public class DashChunkSource implements ChunkSource {
* *
* @param manifestFetcher A fetcher for the manifest, which must have already successfully * @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load. * completed an initial load.
* @param adaptationSetIndex The index of the adaptation set that should be used. * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data. * @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats. * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the * lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however * manifest). Choosing a small value will minimize latency introduced by the player, however
...@@ -241,25 +229,24 @@ public class DashChunkSource implements ChunkSource { ...@@ -241,25 +229,24 @@ public class DashChunkSource implements ChunkSource {
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
*/ */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetIndex, int[] representationIndices, DataSource dataSource, DashTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) { boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices, this(manifestFetcher, manifestFetcher.getManifest(), trackSelector,
dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000, dataSource, adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener); elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener);
} }
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, /* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, int adaptationSetIndex, MediaPresentationDescription initialManifest, DashTrackSelector trackSelector,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator,
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs, Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) { boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) {
this.manifestFetcher = manifestFetcher; this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest; this.currentManifest = initialManifest;
this.adaptationSetIndex = adaptationSetIndex; this.trackSelector = trackSelector;
this.representationIndices = representationIndices;
this.dataSource = dataSource; this.dataSource = dataSource;
this.formatEvaluator = formatEvaluator; this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.systemClock = systemClock; this.systemClock = systemClock;
this.liveEdgeLatencyUs = liveEdgeLatencyUs; this.liveEdgeLatencyUs = liveEdgeLatencyUs;
this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs; this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
...@@ -268,72 +255,54 @@ public class DashChunkSource implements ChunkSource { ...@@ -268,72 +255,54 @@ public class DashChunkSource implements ChunkSource {
this.eventListener = eventListener; this.eventListener = eventListener;
this.evaluation = new Evaluation(); this.evaluation = new Evaluation();
this.availableRangeValues = new long[2]; this.availableRangeValues = new long[2];
drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
periodHolders = new SparseArray<>(); periodHolders = new SparseArray<>();
tracks = new ArrayList<ExposedTrack>();
}
processManifest(currentManifest); // ChunkSource implementation.
long totalDurationUs = 0; @Override
int maxWidth = 0; public void maybeThrowError() throws IOException {
int maxHeight = 0; if (fatalError != null) {
String mimeType = ""; throw fatalError;
for (int i = 0; i < periodHolders.size(); i++) { } else if (manifestFetcher != null) {
PeriodHolder periodHolder = periodHolders.valueAt(i); manifestFetcher.maybeThrowError();
if (totalDurationUs != C.UNKNOWN_TIME_US) {
if (periodHolder.durationUs == C.UNKNOWN_TIME_US) {
totalDurationUs = C.UNKNOWN_TIME_US;
} else {
totalDurationUs += periodHolder.durationUs;
}
}
maxWidth = Math.max(maxWidth, periodHolder.maxWidth);
maxHeight = Math.max(maxHeight, periodHolder.maxHeight);
mimeType = periodHolder.mimeType;
} }
this.maxWidth = maxWidth == 0 ? MediaFormat.NO_VALUE : maxWidth;
this.maxHeight = maxHeight == 0 ? MediaFormat.NO_VALUE : maxHeight;
// TODO: Remove this and pass proper formats instead (b/22996976).
this.trackFormat = MediaFormat.createFormatForMimeType(mimeType, MediaFormat.NO_VALUE,
totalDurationUs);
} }
@Override @Override
public boolean prepare() { public boolean prepare() {
return true; if (!prepareCalled) {
prepareCalled = true;
try {
trackSelector.selectTracks(currentManifest, 0, this);
} catch (IOException e) {
fatalError = e;
}
}
return fatalError == null;
} }
@Override @Override
public int getTrackCount() { public int getTrackCount() {
return 1; return tracks.size();
} }
@Override @Override
public final MediaFormat getFormat(int track) { public final MediaFormat getFormat(int track) {
return trackFormat; return tracks.get(track).trackFormat;
}
// VisibleForTesting
/* package */ TimeRange getAvailableRange() {
return availableRange;
} }
@Override @Override
public void enable(int track) { public void enable(int track) {
fatalError = null; enabledTrack = tracks.get(track);
formatEvaluator.enable(); processManifest(currentManifest);
if (manifestFetcher != null) { if (enabledTrack.isAdaptive()) {
manifestFetcher.enable(); adaptiveFormatEvaluator.enable();
} }
}
@Override
public void disable(List<? extends MediaChunk> queue) {
formatEvaluator.disable();
if (manifestFetcher != null) { if (manifestFetcher != null) {
manifestFetcher.disable(); manifestFetcher.enable();
} }
availableRange = null;
} }
@Override @Override
...@@ -372,15 +341,15 @@ public class DashChunkSource implements ChunkSource { ...@@ -372,15 +341,15 @@ public class DashChunkSource implements ChunkSource {
evaluation.queueSize = queue.size(); evaluation.queueSize = queue.size();
if (evaluation.format == null || !lastChunkWasInitialization) { if (evaluation.format == null || !lastChunkWasInitialization) {
PeriodHolder periodHolder = null; if (enabledTrack.isAdaptive()) {
if (!queue.isEmpty()) { adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats,
periodHolder = periodHolders.get(queue.get(queue.size() - 1).parentId); evaluation);
} } else {
if (periodHolder == null) { evaluation.format = enabledTrack.fixedFormat;
periodHolder = periodHolders.valueAt(0); evaluation.trigger = Chunk.TRIGGER_MANUAL;
} }
formatEvaluator.evaluate(queue, playbackPositionUs, periodHolder.formats, evaluation);
} }
Format selectedFormat = evaluation.format; Format selectedFormat = evaluation.format;
out.queueSize = evaluation.queueSize; out.queueSize = evaluation.queueSize;
...@@ -467,7 +436,6 @@ public class DashChunkSource implements ChunkSource { ...@@ -467,7 +436,6 @@ public class DashChunkSource implements ChunkSource {
RepresentationHolder representationHolder = RepresentationHolder representationHolder =
periodHolder.representationHolders.get(selectedFormat.id); periodHolder.representationHolders.get(selectedFormat.id);
Representation selectedRepresentation = representationHolder.representation; Representation selectedRepresentation = representationHolder.representation;
ChunkExtractorWrapper extractorWrapper = representationHolder.extractorWrapper;
RangedUri pendingInitializationUri = null; RangedUri pendingInitializationUri = null;
RangedUri pendingIndexUri = null; RangedUri pendingIndexUri = null;
...@@ -483,8 +451,8 @@ public class DashChunkSource implements ChunkSource { ...@@ -483,8 +451,8 @@ public class DashChunkSource implements ChunkSource {
if (pendingInitializationUri != null || pendingIndexUri != null) { if (pendingInitializationUri != null || pendingIndexUri != null) {
// We have initialization and/or index requests to make. // We have initialization and/or index requests to make.
Chunk initializationChunk = newInitializationChunk(pendingInitializationUri, pendingIndexUri, Chunk initializationChunk = newInitializationChunk(pendingInitializationUri, pendingIndexUri,
selectedRepresentation, extractorWrapper, dataSource, periodHolder.localIndex, selectedRepresentation, representationHolder.extractorWrapper, dataSource,
evaluation.trigger); periodHolder.localIndex, evaluation.trigger);
lastChunkWasInitialization = true; lastChunkWasInitialization = true;
out.chunk = initializationChunk; out.chunk = initializationChunk;
return; return;
...@@ -500,15 +468,6 @@ public class DashChunkSource implements ChunkSource { ...@@ -500,15 +468,6 @@ public class DashChunkSource implements ChunkSource {
} }
@Override @Override
public void maybeThrowError() throws IOException {
if (fatalError != null) {
throw fatalError;
} else if (manifestFetcher != null) {
manifestFetcher.maybeThrowError();
}
}
@Override
public void onChunkLoadCompleted(Chunk chunk) { public void onChunkLoadCompleted(Chunk chunk) {
if (chunk instanceof InitializationChunk) { if (chunk instanceof InitializationChunk) {
InitializationChunk initializationChunk = (InitializationChunk) chunk; InitializationChunk initializationChunk = (InitializationChunk) chunk;
...@@ -531,8 +490,8 @@ public class DashChunkSource implements ChunkSource { ...@@ -531,8 +490,8 @@ public class DashChunkSource implements ChunkSource {
// The null check avoids overwriting drmInitData obtained from the manifest with drmInitData // The null check avoids overwriting drmInitData obtained from the manifest with drmInitData
// obtained from the stream, as per DASH IF Interoperability Recommendations V3.0, 7.5.3. // obtained from the stream, as per DASH IF Interoperability Recommendations V3.0, 7.5.3.
if (drmInitData == null && initializationChunk.hasDrmInitData()) { if (periodHolder.drmInitData == null && initializationChunk.hasDrmInitData()) {
drmInitData = initializationChunk.getDrmInitData(); periodHolder.drmInitData = initializationChunk.getDrmInitData();
} }
} }
} }
...@@ -542,6 +501,105 @@ public class DashChunkSource implements ChunkSource { ...@@ -542,6 +501,105 @@ public class DashChunkSource implements ChunkSource {
// Do nothing. // Do nothing.
} }
@Override
public void disable(List<? extends MediaChunk> queue) {
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.disable();
}
if (manifestFetcher != null) {
manifestFetcher.disable();
}
periodHolders.clear();
evaluation.format = null;
availableRange = null;
fatalError = null;
enabledTrack = null;
}
// DashTrackSelector.Output implementation.
@Override
public void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
int adaptationSetIndex, int[] representationIndices) {
if (adaptiveFormatEvaluator == null) {
// Do nothing.
return;
}
AdaptationSet adaptationSet = manifest.getPeriod(periodIndex).adaptationSets.get(
adaptationSetIndex);
int maxWidth = 0;
int maxHeight = 0;
Format maxHeightRepresentationFormat = null;
Format[] representationFormats = new Format[representationIndices.length];
for (int i = 0; i < representationFormats.length; i++) {
Format format = adaptationSet.representations.get(representationIndices[i]).format;
if (maxHeightRepresentationFormat == null || format.height > maxHeight) {
maxHeightRepresentationFormat = format;
}
maxWidth = Math.max(maxWidth, format.width);
maxHeight = Math.max(maxHeight, format.height);
representationFormats[i] = format;
}
Arrays.sort(representationFormats, new DecreasingBandwidthComparator());
long trackDurationUs = manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000;
MediaFormat trackFormat = buildTrackFormat(adaptationSet.type, maxHeightRepresentationFormat,
trackDurationUs).copyAsAdaptive();
tracks.add(new ExposedTrack(trackFormat, adaptationSetIndex, representationFormats, maxWidth,
maxHeight));
}
@Override
public void fixedTrack(MediaPresentationDescription manifest, int periodIndex,
int adaptationSetIndex, int representationIndex) {
List<AdaptationSet> adaptationSets = manifest.getPeriod(periodIndex).adaptationSets;
AdaptationSet adaptationSet = adaptationSets.get(adaptationSetIndex);
Format representationFormat = adaptationSet.representations.get(representationIndex).format;
MediaFormat trackFormat = buildTrackFormat(adaptationSet.type, representationFormat,
manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000);
tracks.add(new ExposedTrack(trackFormat, adaptationSetIndex, representationFormat));
}
// Private methods.
// Visible for testing.
/* package */ TimeRange getAvailableRange() {
return availableRange;
}
private static MediaPresentationDescription buildManifest(long durationMs,
int adaptationSetType, List<Representation> representations) {
AdaptationSet adaptationSet = new AdaptationSet(0, adaptationSetType, representations);
Period period = new Period(null, 0, Collections.singletonList(adaptationSet));
return new MediaPresentationDescription(-1, durationMs, -1, false, -1, -1, null, null,
Collections.singletonList(period));
}
private static MediaFormat buildTrackFormat(int adaptationSetType, Format format,
long durationUs) {
switch (adaptationSetType) {
case AdaptationSet.TYPE_VIDEO:
return MediaFormat.createVideoFormat(getMediaMimeType(format), format.bitrate,
MediaFormat.NO_VALUE, durationUs, format.width, format.height, 0, null);
case AdaptationSet.TYPE_AUDIO:
return MediaFormat.createAudioFormat(getMediaMimeType(format), format.bitrate,
MediaFormat.NO_VALUE, durationUs, format.audioChannels, format.audioSamplingRate, null);
case AdaptationSet.TYPE_TEXT:
return MediaFormat.createTextFormat(getMediaMimeType(format), format.bitrate,
format.language, durationUs);
default:
throw new IllegalStateException("Invalid type: " + adaptationSetType);
}
}
private static String getMediaMimeType(Format format) {
String mimeType = format.mimeType;
if (MimeTypes.APPLICATION_MP4.equals(format.mimeType) && "stpp".equals(format.codecs)) {
return MimeTypes.APPLICATION_TTML;
}
// TODO: Use codecs to determine media mime type for other formats too.
return mimeType;
}
private static boolean mimeTypeIsWebm(String mimeType) { private static boolean mimeTypeIsWebm(String mimeType) {
return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM); return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
} }
...@@ -590,12 +648,13 @@ public class DashChunkSource implements ChunkSource { ...@@ -590,12 +648,13 @@ public class DashChunkSource implements ChunkSource {
boolean isMediaFormatFinal = (mediaFormat != null); boolean isMediaFormatFinal = (mediaFormat != null);
return new ContainerMediaChunk(dataSource, dataSpec, trigger, representation.format, return new ContainerMediaChunk(dataSource, dataSpec, trigger, representation.format,
startTimeUs, endTimeUs, segmentNum, isLastSegment, sampleOffsetUs, startTimeUs, endTimeUs, segmentNum, isLastSegment, sampleOffsetUs,
representationHolder.extractorWrapper, mediaFormat, maxWidth, maxHeight, drmInitData, representationHolder.extractorWrapper, mediaFormat, enabledTrack.adaptiveMaxWidth,
isMediaFormatFinal, periodHolder.localIndex); enabledTrack.adaptiveMaxHeight, periodHolder.drmInitData, isMediaFormatFinal,
periodHolder.localIndex);
} }
} }
private long getNowUs() { private long getNowUnixTimeUs() {
if (elapsedRealtimeOffsetUs != 0) { if (elapsedRealtimeOffsetUs != 0) {
return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs; return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
} else { } else {
...@@ -603,46 +662,6 @@ public class DashChunkSource implements ChunkSource { ...@@ -603,46 +662,6 @@ public class DashChunkSource implements ChunkSource {
} }
} }
private static String getMediaMimeType(Representation representation) {
String mimeType = representation.format.mimeType;
if (MimeTypes.APPLICATION_MP4.equals(representation.format.mimeType)
&& "stpp".equals(representation.format.codecs)) {
return MimeTypes.APPLICATION_TTML;
}
// TODO: Use codecs to determine media mime type for other formats too.
return mimeType;
}
private static DrmInitData getDrmInitData(MediaPresentationDescription manifest,
int adaptationSetIndex) {
AdaptationSet adaptationSet = manifest.getPeriod(0).adaptationSets.get(adaptationSetIndex);
String drmInitMimeType = mimeTypeIsWebm(adaptationSet.representations.get(0).format.mimeType)
? MimeTypes.VIDEO_WEBM : MimeTypes.VIDEO_MP4;
if (adaptationSet.contentProtections.isEmpty()) {
return null;
} else {
DrmInitData.Mapped drmInitData = null;
for (int i = 0; i < adaptationSet.contentProtections.size(); i++) {
ContentProtection contentProtection = adaptationSet.contentProtections.get(i);
if (contentProtection.uuid != null && contentProtection.data != null) {
if (drmInitData == null) {
drmInitData = new DrmInitData.Mapped(drmInitMimeType);
}
drmInitData.put(contentProtection.uuid, contentProtection.data);
}
}
return drmInitData;
}
}
private static MediaPresentationDescription buildManifest(long durationMs,
List<Representation> representations) {
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
Period period = new Period(null, 0, Collections.singletonList(adaptationSet));
return new MediaPresentationDescription(-1, durationMs, -1, false, -1, -1, null, null,
Collections.singletonList(period));
}
private PeriodHolder findPeriodHolder(long positionUs) { private PeriodHolder findPeriodHolder(long positionUs) {
// if positionUs is before the first period, return the first period // if positionUs is before the first period, return the first period
if (positionUs < periodHolders.valueAt(0).getAvailableStartTimeUs()) { if (positionUs < periodHolders.valueAt(0).getAvailableStartTimeUs()) {
...@@ -670,10 +689,15 @@ public class DashChunkSource implements ChunkSource { ...@@ -670,10 +689,15 @@ public class DashChunkSource implements ChunkSource {
periodHolders.remove(periodHolder.localIndex); periodHolders.remove(periodHolder.localIndex);
} }
// Update existing periods. // Update existing periods. Only the first and last periods can change.
try { try {
for (int i = 0; i < periodHolders.size(); i++) { int periodHolderCount = periodHolders.size();
periodHolders.valueAt(i).updatePeriod(manifest, i); if (periodHolderCount > 0) {
periodHolders.valueAt(0).updatePeriod(manifest, 0, enabledTrack);
if (periodHolderCount > 1) {
int lastIndex = periodHolderCount - 1;
periodHolders.valueAt(lastIndex).updatePeriod(manifest, lastIndex, enabledTrack);
}
} }
} catch (BehindLiveWindowException e) { } catch (BehindLiveWindowException e) {
fatalError = e; fatalError = e;
...@@ -682,14 +706,13 @@ public class DashChunkSource implements ChunkSource { ...@@ -682,14 +706,13 @@ public class DashChunkSource implements ChunkSource {
// Add new periods. // Add new periods.
for (int i = periodHolders.size(); i < manifest.getPeriodCount(); i++) { for (int i = periodHolders.size(); i < manifest.getPeriodCount(); i++) {
PeriodHolder periodHolder = new PeriodHolder(periodHolderNextIndex, manifest, i, PeriodHolder holder = new PeriodHolder(nextPeriodHolderIndex, manifest, i, enabledTrack);
adaptationSetIndex, representationIndices); periodHolders.put(nextPeriodHolderIndex, holder);
periodHolders.put(periodHolderNextIndex, periodHolder); nextPeriodHolderIndex++;
periodHolderNextIndex++;
} }
// Update the available range. // Update the available range.
TimeRange newAvailableRange = getAvailableRange(getNowUs()); TimeRange newAvailableRange = getAvailableRange(getNowUnixTimeUs());
if (availableRange == null || !availableRange.equals(newAvailableRange)) { if (availableRange == null || !availableRange.equals(newAvailableRange)) {
availableRange = newAvailableRange; availableRange = newAvailableRange;
notifyAvailableRangeChanged(availableRange); notifyAvailableRangeChanged(availableRange);
...@@ -698,7 +721,7 @@ public class DashChunkSource implements ChunkSource { ...@@ -698,7 +721,7 @@ public class DashChunkSource implements ChunkSource {
currentManifest = manifest; currentManifest = manifest;
} }
private TimeRange getAvailableRange(long nowUs) { private TimeRange getAvailableRange(long nowUnixTimeUs) {
PeriodHolder firstPeriod = periodHolders.valueAt(0); PeriodHolder firstPeriod = periodHolders.valueAt(0);
PeriodHolder lastPeriod = periodHolders.valueAt(periodHolders.size() - 1); PeriodHolder lastPeriod = periodHolders.valueAt(periodHolders.size() - 1);
...@@ -711,7 +734,7 @@ public class DashChunkSource implements ChunkSource { ...@@ -711,7 +734,7 @@ public class DashChunkSource implements ChunkSource {
long maxEndPositionUs = lastPeriod.isIndexUnbounded() ? Long.MAX_VALUE long maxEndPositionUs = lastPeriod.isIndexUnbounded() ? Long.MAX_VALUE
: lastPeriod.getAvailableEndTimeUs(); : lastPeriod.getAvailableEndTimeUs();
long elapsedRealtimeAtZeroUs = (systemClock.elapsedRealtime() * 1000) long elapsedRealtimeAtZeroUs = (systemClock.elapsedRealtime() * 1000)
- (nowUs - (currentManifest.availabilityStartTime * 1000)); - (nowUnixTimeUs - (currentManifest.availabilityStartTime * 1000));
long timeShiftBufferDepthUs = currentManifest.timeShiftBufferDepth == -1 ? -1 long timeShiftBufferDepthUs = currentManifest.timeShiftBufferDepth == -1 ? -1
: currentManifest.timeShiftBufferDepth * 1000; : currentManifest.timeShiftBufferDepth * 1000;
return new DynamicTimeRange(minStartPositionUs, maxEndPositionUs, elapsedRealtimeAtZeroUs, return new DynamicTimeRange(minStartPositionUs, maxEndPositionUs, elapsedRealtimeAtZeroUs,
...@@ -729,6 +752,47 @@ public class DashChunkSource implements ChunkSource { ...@@ -729,6 +752,47 @@ public class DashChunkSource implements ChunkSource {
} }
} }
// Private classes.
/**
 * A track exposed by this chunk source: either a single fixed representation, or an
 * adaptive selection over several representations in one adaptation set.
 */
private static final class ExposedTrack {

  // The format exposed for this track.
  public final MediaFormat trackFormat;

  // The adaptation set (within the period) that the track's representation(s) belong to.
  private final int adaptationSetIndex;

  // Set for a non-adaptive (fixed) track; null otherwise.
  private final Format fixedFormat;

  // Set for an adaptive track; adaptiveFormats is null otherwise.
  private final Format[] adaptiveFormats;
  private final int adaptiveMaxWidth;
  private final int adaptiveMaxHeight;

  /** Constructs a fixed track exposing a single representation. */
  public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format fixedFormat) {
    this(trackFormat, adaptationSetIndex, fixedFormat, null, -1, -1);
  }

  /** Constructs an adaptive track spanning the given representations. */
  public ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format[] adaptiveFormats,
      int maxWidth, int maxHeight) {
    this(trackFormat, adaptationSetIndex, null, adaptiveFormats, maxWidth, maxHeight);
  }

  // Canonical constructor shared by both public variants.
  private ExposedTrack(MediaFormat trackFormat, int adaptationSetIndex, Format fixedFormat,
      Format[] adaptiveFormats, int adaptiveMaxWidth, int adaptiveMaxHeight) {
    this.trackFormat = trackFormat;
    this.adaptationSetIndex = adaptationSetIndex;
    this.fixedFormat = fixedFormat;
    this.adaptiveFormats = adaptiveFormats;
    this.adaptiveMaxWidth = adaptiveMaxWidth;
    this.adaptiveMaxHeight = adaptiveMaxHeight;
  }

  /** Whether this track adapts between multiple representations. */
  public boolean isAdaptive() {
    return adaptiveFormats != null;
  }

}
private static final class RepresentationHolder { private static final class RepresentationHolder {
public final ChunkExtractorWrapper extractorWrapper; public final ChunkExtractorWrapper extractorWrapper;
...@@ -738,16 +802,18 @@ public class DashChunkSource implements ChunkSource { ...@@ -738,16 +802,18 @@ public class DashChunkSource implements ChunkSource {
public MediaFormat mediaFormat; public MediaFormat mediaFormat;
private final long periodStartTimeUs; private final long periodStartTimeUs;
private long periodDurationUs;
private long periodDurationUs;
private int segmentNumShift; private int segmentNumShift;
public RepresentationHolder(long periodStartTimeUs, long periodDurationUs, public RepresentationHolder(long periodStartTimeUs, long periodDurationUs,
Representation representation, ChunkExtractorWrapper extractorWrapper) { Representation representation) {
this.periodStartTimeUs = periodStartTimeUs; this.periodStartTimeUs = periodStartTimeUs;
this.periodDurationUs = periodDurationUs; this.periodDurationUs = periodDurationUs;
this.representation = representation; this.representation = representation;
this.extractorWrapper = extractorWrapper; extractorWrapper = MimeTypes.TEXT_VTT.equals(representation.format.mimeType) ? null
: new ChunkExtractorWrapper(mimeTypeIsWebm(representation.format.mimeType)
? new WebmExtractor() : new FragmentedMp4Extractor());
segmentIndex = representation.getIndex(); segmentIndex = representation.getIndex();
} }
...@@ -775,15 +841,15 @@ public class DashChunkSource implements ChunkSource { ...@@ -775,15 +841,15 @@ public class DashChunkSource implements ChunkSource {
int newIndexFirstSegmentNum = newIndex.getFirstSegmentNum(); int newIndexFirstSegmentNum = newIndex.getFirstSegmentNum();
long newIndexStartTimeUs = newIndex.getTimeUs(newIndexFirstSegmentNum); long newIndexStartTimeUs = newIndex.getTimeUs(newIndexFirstSegmentNum);
if (oldIndexEndTimeUs == newIndexStartTimeUs) { if (oldIndexEndTimeUs == newIndexStartTimeUs) {
// The new manifest continues where the old one ended, with no overlap. // The new index continues where the old one ended, with no overlap.
segmentNumShift += oldIndex.getLastSegmentNum(periodDurationUs) + 1 segmentNumShift += oldIndex.getLastSegmentNum(periodDurationUs) + 1
- newIndexFirstSegmentNum; - newIndexFirstSegmentNum;
} else if (oldIndexEndTimeUs < newIndexStartTimeUs) { } else if (oldIndexEndTimeUs < newIndexStartTimeUs) {
// There's a gap between the old manifest and the new one which means we've slipped // There's a gap between the old index and the new one which means we've slipped behind the
// behind the live window and can't proceed. // live window and can't proceed.
throw new BehindLiveWindowException(); throw new BehindLiveWindowException();
} else { } else {
// The new manifest overlaps with the old one. // The new index overlaps with the old one.
segmentNumShift += oldIndex.getSegmentNum(newIndexStartTimeUs, periodDurationUs) segmentNumShift += oldIndex.getSegmentNum(newIndexStartTimeUs, periodDurationUs)
- newIndexFirstSegmentNum; - newIndexFirstSegmentNum;
} }
...@@ -823,77 +889,65 @@ public class DashChunkSource implements ChunkSource { ...@@ -823,77 +889,65 @@ public class DashChunkSource implements ChunkSource {
public final int localIndex; public final int localIndex;
public final long startTimeUs; public final long startTimeUs;
public final long durationUs;
public final String mimeType;
public final Format[] formats;
public final HashMap<String, RepresentationHolder> representationHolders; public final HashMap<String, RepresentationHolder> representationHolders;
private final int adaptationSetIndex;
private final int[] representationIndices; private final int[] representationIndices;
private final int maxWidth;
private final int maxHeight; private DrmInitData drmInitData;
private boolean indexIsUnbounded; private boolean indexIsUnbounded;
private boolean indexIsExplicit; private boolean indexIsExplicit;
private long availableStartTimeUs; private long availableStartTimeUs;
private long availableEndTimeUs; private long availableEndTimeUs;
public PeriodHolder(int localIndex, MediaPresentationDescription manifest, public PeriodHolder(int localIndex, MediaPresentationDescription manifest, int manifestIndex,
int manifestIndex, int adaptationSetIndex, int[] representationIndices) { ExposedTrack selectedTrack) {
this.localIndex = localIndex; this.localIndex = localIndex;
this.adaptationSetIndex = adaptationSetIndex;
this.representationIndices = representationIndices;
Period period = manifest.getPeriod(manifestIndex); Period period = manifest.getPeriod(manifestIndex);
long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
AdaptationSet adaptationSet = period.adaptationSets.get(selectedTrack.adaptationSetIndex);
List<Representation> representations = adaptationSet.representations;
startTimeUs = period.startMs * 1000; startTimeUs = period.startMs * 1000;
durationUs = getPeriodDurationUs(manifest, manifestIndex); drmInitData = getDrmInitData(adaptationSet);
List<Representation> periodRepresentations = if (!selectedTrack.isAdaptive()) {
period.adaptationSets.get(adaptationSetIndex).representations; representationIndices = new int[] {
int representationCount = representationIndices != null ? representationIndices.length getRepresentationIndex(representations, selectedTrack.fixedFormat.id)};
: periodRepresentations.size(); } else {
formats = new Format[representationCount]; representationIndices = new int[selectedTrack.adaptiveFormats.length];
representationHolders = new HashMap<>(representationCount); for (int j = 0; j < selectedTrack.adaptiveFormats.length; j++) {
representationIndices[j] = getRepresentationIndex(
int maxWidth = 0; representations, selectedTrack.adaptiveFormats[j].id);
int maxHeight = 0; }
String mimeType = "";
for (int i = 0; i < representationCount; i++) {
int representationIndex = representationIndices != null ? representationIndices[i] : i;
Representation representation = periodRepresentations.get(representationIndex);
formats[i] = representation.format;
mimeType = getMediaMimeType(representation);
maxWidth = Math.max(formats[i].width, maxWidth);
maxHeight = Math.max(formats[i].height, maxHeight);
Extractor extractor = mimeTypeIsWebm(formats[i].mimeType) ? new WebmExtractor()
: new FragmentedMp4Extractor();
RepresentationHolder representationHolder = new RepresentationHolder(startTimeUs,
durationUs, representation, new ChunkExtractorWrapper(extractor));
representationHolders.put(formats[i].id, representationHolder);
} }
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
this.mimeType = mimeType;
Arrays.sort(formats, new DecreasingBandwidthComparator()); representationHolders = new HashMap<>();
updateRepresentationIndependentProperties(); for (int i = 0; i < representationIndices.length; i++) {
Representation representation = representations.get(representationIndices[i]);
RepresentationHolder representationHolder = new RepresentationHolder(startTimeUs,
periodDurationUs, representation);
representationHolders.put(representation.format.id, representationHolder);
}
updateRepresentationIndependentProperties(periodDurationUs,
representations.get(representationIndices[0]));
} }
public void updatePeriod(MediaPresentationDescription manifest, int manifestIndex) public void updatePeriod(MediaPresentationDescription manifest, int manifestIndex,
throws BehindLiveWindowException { ExposedTrack selectedTrack) throws BehindLiveWindowException {
Period period = manifest.getPeriod(manifestIndex); Period period = manifest.getPeriod(manifestIndex);
long durationUs = getPeriodDurationUs(manifest, manifestIndex); long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
List<Representation> representations = period.adaptationSets
List<Representation> representations = .get(selectedTrack.adaptationSetIndex).representations;
period.adaptationSets.get(adaptationSetIndex).representations;
int representationCount = formats.length; for (int j = 0; j < representationIndices.length; j++) {
for (int i = 0; i < representationCount; i++) { Representation representation = representations.get(representationIndices[j]);
int representationIndex = representationIndices != null ? representationIndices[i] : i; representationHolders.get(representation.format.id).updateRepresentation(periodDurationUs,
Representation representation = representations.get(representationIndex);
representationHolders.get(representation.format.id).updateRepresentation(durationUs,
representation); representation);
} }
updateRepresentationIndependentProperties(); updateRepresentationIndependentProperties(periodDurationUs,
representations.get(representationIndices[0]));
} }
public long getAvailableStartTimeUs() { public long getAvailableStartTimeUs() {
...@@ -915,25 +969,57 @@ public class DashChunkSource implements ChunkSource { ...@@ -915,25 +969,57 @@ public class DashChunkSource implements ChunkSource {
return indexIsExplicit; return indexIsExplicit;
} }
private void updateRepresentationIndependentProperties() { // Private methods.
// Arbitrarily use the first representation to derive representation independent properties.
Representation representation = representationHolders.get(formats[0].id).representation; private void updateRepresentationIndependentProperties(long periodDurationUs,
DashSegmentIndex segmentIndex = representation.getIndex(); Representation arbitaryRepresentation) {
DashSegmentIndex segmentIndex = arbitaryRepresentation.getIndex();
if (segmentIndex != null) { if (segmentIndex != null) {
int firstSegmentNum = segmentIndex.getFirstSegmentNum(); int firstSegmentNum = segmentIndex.getFirstSegmentNum();
int lastSegmentNum = segmentIndex.getLastSegmentNum(durationUs); int lastSegmentNum = segmentIndex.getLastSegmentNum(periodDurationUs);
indexIsUnbounded = lastSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED; indexIsUnbounded = lastSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
indexIsExplicit = segmentIndex.isExplicit(); indexIsExplicit = segmentIndex.isExplicit();
availableStartTimeUs = startTimeUs + segmentIndex.getTimeUs(firstSegmentNum); availableStartTimeUs = startTimeUs + segmentIndex.getTimeUs(firstSegmentNum);
if (!indexIsUnbounded) { if (!indexIsUnbounded) {
availableEndTimeUs = startTimeUs + segmentIndex.getTimeUs(lastSegmentNum) availableEndTimeUs = startTimeUs + segmentIndex.getTimeUs(lastSegmentNum)
+ segmentIndex.getDurationUs(lastSegmentNum, durationUs); + segmentIndex.getDurationUs(lastSegmentNum, periodDurationUs);
} }
} else { } else {
indexIsUnbounded = false; indexIsUnbounded = false;
indexIsExplicit = true; indexIsExplicit = true;
availableStartTimeUs = startTimeUs; availableStartTimeUs = startTimeUs;
availableEndTimeUs = startTimeUs + durationUs; availableEndTimeUs = startTimeUs + periodDurationUs;
}
}
/**
 * Returns the index of the representation whose format has the given id.
 *
 * @param representations The representations to search.
 * @param formatId The format id to locate.
 * @return The index of the matching representation.
 * @throws IllegalStateException If no representation has the given format id.
 */
private static int getRepresentationIndex(List<Representation> representations,
    String formatId) {
  int representationCount = representations.size();
  for (int index = 0; index < representationCount; index++) {
    if (formatId.equals(representations.get(index).format.id)) {
      return index;
    }
  }
  throw new IllegalStateException("Missing format id: " + formatId);
}
/**
 * Builds {@link DrmInitData} from the content protection elements of an adaptation set.
 *
 * @param adaptationSet The adaptation set whose content protections are inspected.
 * @return The drm initialization data, or null if the adaptation set declares no content
 *     protections, or none of them carries both a uuid and data.
 */
private static DrmInitData getDrmInitData(AdaptationSet adaptationSet) {
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  }
  // Derive the mime type only when protection data actually exists. The original code
  // touched representations.get(0) unconditionally, which was wasted work (and would
  // throw) for unprotected adaptation sets with no representations.
  String drmInitMimeType = mimeTypeIsWebm(adaptationSet.representations.get(0).format.mimeType)
      ? MimeTypes.VIDEO_WEBM : MimeTypes.VIDEO_MP4;
  DrmInitData.Mapped drmInitData = null;
  for (int i = 0; i < adaptationSet.contentProtections.size(); i++) {
    ContentProtection contentProtection = adaptationSet.contentProtections.get(i);
    // Only schemes that supply both a uuid and initialization data are usable.
    if (contentProtection.uuid != null && contentProtection.data != null) {
      if (drmInitData == null) {
        drmInitData = new DrmInitData.Mapped(drmInitMimeType);
      }
      drmInitData.put(contentProtection.uuid, contentProtection.data);
    }
  }
  return drmInitData;
}
} }
......
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import java.io.IOException;
/**
* Specifies a track selection from a {@link Period} of a media presentation description.
*/
public interface DashTrackSelector {

  /**
   * Defines a selector output through which selected tracks are reported.
   */
  interface Output {

    /**
     * Outputs an adaptive track, covering the specified representations in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the representations
     *     are located.
     * @param representationIndices The indices of the representations within the adaptation set.
     */
    void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
        int adaptationSetIndex, int[] representationIndices);

    /**
     * Outputs a fixed track corresponding to the specified representation in the specified
     * adaptation set.
     *
     * @param manifest The media presentation description being processed.
     * @param periodIndex The index of the period being processed.
     * @param adaptationSetIndex The index of the adaptation set within which the track is located.
     * @param representationIndex The index of the representation within the adaptation set.
     */
    void fixedTrack(MediaPresentationDescription manifest, int periodIndex, int adaptationSetIndex,
        int representationIndex);

  }

  /**
   * Outputs a track selection for a given period.
   *
   * @param manifest The media presentation description to process.
   * @param periodIndex The index of the period to process.
   * @param output The output to receive tracks.
   * @throws IOException If an error occurs processing the period.
   */
  void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException;

}
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
import java.io.IOException;
/**
* A default {@link DashTrackSelector} implementation.
*/
// TODO: Add more configuration options (e.g. ability to disable adaptive track output).
public final class DefaultDashTrackSelector implements DashTrackSelector {

  private final int adaptationSetType;
  private final Context context;
  private final boolean filterVideoRepresentations;
  private final boolean filterProtectedHdContent;

  /**
   * Creates a selector for video tracks.
   *
   * @param context A context. May be null if {@code filterVideoRepresentations == false}.
   * @param filterVideoRepresentations Whether video representations should be filtered according to
   *     the capabilities of the device. It is strongly recommended to set this to {@code true},
   *     unless the application has already verified that all representations are playable.
   * @param filterProtectedHdContent Whether video representations that are both drm protected and
   *     high definition should be filtered when tracks are built. If
   *     {@code filterVideoRepresentations == false} then this parameter is ignored.
   */
  public static DefaultDashTrackSelector newVideoInstance(Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
        filterVideoRepresentations, filterProtectedHdContent);
  }

  /** Creates a selector that outputs one fixed track per audio representation. */
  public static DefaultDashTrackSelector newAudioInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
  }

  /** Creates a selector that outputs one fixed track per text representation. */
  public static DefaultDashTrackSelector newTextInstance() {
    return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
  }

  private DefaultDashTrackSelector(int adaptationSetType, Context context,
      boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
    this.adaptationSetType = adaptationSetType;
    this.context = context;
    this.filterVideoRepresentations = filterVideoRepresentations;
    this.filterProtectedHdContent = filterProtectedHdContent;
  }

  @Override
  public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
      throws IOException {
    Period period = manifest.getPeriod(periodIndex);
    for (int setIndex = 0; setIndex < period.adaptationSets.size(); setIndex++) {
      AdaptationSet adaptationSet = period.adaptationSets.get(setIndex);
      if (adaptationSet.type != adaptationSetType) {
        // Not the media type this selector handles.
        continue;
      }
      if (adaptationSetType != AdaptationSet.TYPE_VIDEO) {
        // Audio/text: every representation is exposed as its own fixed track.
        int representationCount = adaptationSet.representations.size();
        for (int k = 0; k < representationCount; k++) {
          output.fixedTrack(manifest, periodIndex, setIndex, k);
        }
        continue;
      }
      // Video: select the (possibly device-filtered) representation indices, then output a
      // single adaptive track over them plus one fixed track per representation.
      int[] representationIndices = filterVideoRepresentations
          ? VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
              context, adaptationSet.representations, null,
              filterProtectedHdContent && adaptationSet.hasContentProtection())
          : Util.firstIntegersArray(adaptationSet.representations.size());
      output.adaptiveTrack(manifest, periodIndex, setIndex, representationIndices);
      for (int k = 0; k < representationIndices.length; k++) {
        output.fixedTrack(manifest, periodIndex, setIndex, representationIndices[k]);
      }
    }
  }

}
...@@ -312,6 +312,20 @@ public final class Util { ...@@ -312,6 +312,20 @@ public final class Util {
} }
/** /**
* Creates an integer array containing the integers from 0 to {@code length - 1}.
*
* @param length The length of the array.
* @return The array.
*/
public static int[] firstIntegersArray(int length) {
  int[] firstIntegers = new int[length];
  // Fill from the top down; each slot receives its own index.
  int i = length;
  while (i-- > 0) {
    firstIntegers[i] = i;
  }
  return firstIntegers;
}
/**
* Parses an xs:duration attribute value, returning the parsed duration in milliseconds. * Parses an xs:duration attribute value, returning the parsed duration in milliseconds.
* *
* @param value The attribute value to parse. * @param value The attribute value to parse.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment