Commit e89c40cf by Oliver Woodman

Handle renderer building cancellation.

If the manifest server response arrived after the player was released, the
renderer builder would set up renderers for a released player, causing an
exception to be thrown in the DASH case.

Also fixes Issue #657.
parent 9e0ca9e7
......@@ -41,7 +41,6 @@ import com.google.android.exoplayer.dash.mpd.UtcTimingElement;
import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver;
import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver.UtcTimingCallback;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.drm.UnsupportedDrmException;
......@@ -54,7 +53,6 @@ import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import com.google.android.exoplayer.upstream.UriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.Util;
import android.content.Context;
......@@ -69,8 +67,7 @@ import java.util.List;
/**
* A {@link RendererBuilder} for DASH.
*/
public class DashRendererBuilder implements RendererBuilder,
ManifestCallback<MediaPresentationDescription>, UtcTimingCallback {
public class DashRendererBuilder implements RendererBuilder {
private static final String TAG = "DashRendererBuilder";
......@@ -102,13 +99,7 @@ public class DashRendererBuilder implements RendererBuilder,
private final MediaDrmCallback drmCallback;
private final AudioCapabilities audioCapabilities;
private DemoPlayer player;
private RendererBuilderCallback callback;
private ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private UriDataSource manifestDataSource;
private MediaPresentationDescription manifest;
private long elapsedRealtimeOffset;
private AsyncRendererBuilder currentAsyncBuilder;
public DashRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities) {
......@@ -120,236 +111,294 @@ public class DashRendererBuilder implements RendererBuilder,
}
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
this.player = player;
this.callback = callback;
MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
manifestDataSource = new DefaultUriDataSource(context, userAgent);
manifestFetcher = new ManifestFetcher<>(url, manifestDataSource, parser);
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
public void buildRenderers(DemoPlayer player) {
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback,
audioCapabilities, player);
currentAsyncBuilder.init();
}
@Override
public void onSingleManifest(MediaPresentationDescription manifest) {
this.manifest = manifest;
if (manifest.dynamic && manifest.utcTiming != null) {
UtcTimingElementResolver.resolveTimingElement(manifestDataSource, manifest.utcTiming,
manifestFetcher.getManifestLoadCompleteTimestamp(), this);
} else {
buildRenderers();
// Cancels any in-flight asynchronous build so that a released player does not
// receive renderers or errors after the manifest response arrives.
public void cancel() {
if (currentAsyncBuilder != null) {
currentAsyncBuilder.cancel();
// Drop the reference so a subsequent buildRenderers starts a fresh build.
currentAsyncBuilder = null;
}
}
@Override
public void onSingleManifestError(IOException e) {
callback.onRenderersError(e);
}
@Override
public void onTimestampResolved(UtcTimingElement utcTiming, long elapsedRealtimeOffset) {
this.elapsedRealtimeOffset = elapsedRealtimeOffset;
buildRenderers();
}
@Override
public void onTimestampError(UtcTimingElement utcTiming, IOException e) {
Log.e(TAG, "Failed to resolve UtcTiming element [" + utcTiming + "]", e);
// Be optimistic and continue in the hope that the device clock is correct.
buildRenderers();
}
private void buildRenderers() {
Period period = manifest.periods.get(0);
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
boolean hasContentProtection = false;
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
int audioAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_AUDIO);
AdaptationSet videoAdaptationSet = null;
AdaptationSet audioAdaptationSet = null;
if (videoAdaptationSetIndex != -1) {
videoAdaptationSet = period.adaptationSets.get(videoAdaptationSetIndex);
hasContentProtection |= videoAdaptationSet.hasContentProtection();
private static final class AsyncRendererBuilder
implements ManifestFetcher.ManifestCallback<MediaPresentationDescription>, UtcTimingCallback {
private final Context context;
private final String userAgent;
private final MediaDrmCallback drmCallback;
private final AudioCapabilities audioCapabilities;
private final DemoPlayer player;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final UriDataSource manifestDataSource;
private boolean canceled;
private MediaPresentationDescription manifest;
private long elapsedRealtimeOffset;
// Captures everything needed for the asynchronous DASH build. Constructing the
// ManifestFetcher here does not start loading; init() triggers the load.
public AsyncRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities, DemoPlayer player) {
this.context = context;
this.userAgent = userAgent;
this.drmCallback = drmCallback;
this.audioCapabilities = audioCapabilities;
this.player = player;
MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
manifestDataSource = new DefaultUriDataSource(context, userAgent);
manifestFetcher = new ManifestFetcher<>(url, manifestDataSource, parser);
}
if (audioAdaptationSetIndex != -1) {
audioAdaptationSet = period.adaptationSets.get(audioAdaptationSetIndex);
hasContentProtection |= audioAdaptationSet.hasContentProtection();
// Starts the single manifest load; results arrive via onSingleManifest or
// onSingleManifestError on the player's main-handler looper.
public void init() {
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
}
// Fail if we have neither video or audio.
if (videoAdaptationSet == null && audioAdaptationSet == null) {
callback.onRenderersError(new IllegalStateException("No video or audio adaptation sets"));
return;
// Marks this build as canceled; all subsequent manifest/timing callbacks
// become no-ops so the (possibly released) player is never touched.
public void cancel() {
canceled = true;
}
// Check drm support if necessary.
boolean filterHdContent = false;
StreamingDrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
@Override
public void onSingleManifest(MediaPresentationDescription manifest) {
if (canceled) {
return;
}
try {
drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
filterHdContent = videoAdaptationSet != null && videoAdaptationSet.hasContentProtection()
&& getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
} catch (UnsupportedDrmException e) {
callback.onRenderersError(e);
this.manifest = manifest;
if (manifest.dynamic && manifest.utcTiming != null) {
UtcTimingElementResolver.resolveTimingElement(manifestDataSource, manifest.utcTiming,
manifestFetcher.getManifestLoadCompleteTimestamp(), this);
} else {
buildRenderers();
}
}
@Override
public void onSingleManifestError(IOException e) {
// Swallow the error if the build was canceled: the player may already be released.
if (canceled) {
return;
}
player.onRenderersError(e);
}
// Determine which video representations we should use for playback.
int[] videoRepresentationIndices = null;
if (videoAdaptationSet != null) {
try {
videoRepresentationIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, videoAdaptationSet.representations, null, filterHdContent);
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
@Override
public void onTimestampResolved(UtcTimingElement utcTiming, long elapsedRealtimeOffset) {
// Ignore the result if the build was canceled.
if (canceled) {
return;
}
// Record the clock offset; it is passed to the DashChunkSources built later.
this.elapsedRealtimeOffset = elapsedRealtimeOffset;
buildRenderers();
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoRepresentationIndices == null || videoRepresentationIndices.length == 0) {
videoRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
@Override
public void onTimestampError(UtcTimingElement utcTiming, IOException e) {
// Ignore the error if the build was canceled.
if (canceled) {
return;
}
Log.e(TAG, "Failed to resolve UtcTiming element [" + utcTiming + "]", e);
// Be optimistic and continue in the hope that the device clock is correct.
buildRenderers();
}
// Build the audio chunk sources.
List<ChunkSource> audioChunkSourceList = new ArrayList<>();
List<String> audioTrackNameList = new ArrayList<>();
if (audioAdaptationSet != null) {
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
List<Representation> audioRepresentations = audioAdaptationSet.representations;
List<String> codecs = new ArrayList<>();
for (int i = 0; i < audioRepresentations.size(); i++) {
Format format = audioRepresentations.get(i).format;
audioTrackNameList.add(format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player));
codecs.add(format.codecs);
private void buildRenderers() {
Period period = manifest.periods.get(0);
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
boolean hasContentProtection = false;
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
int audioAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_AUDIO);
AdaptationSet videoAdaptationSet = null;
AdaptationSet audioAdaptationSet = null;
if (videoAdaptationSetIndex != -1) {
videoAdaptationSet = period.adaptationSets.get(videoAdaptationSetIndex);
hasContentProtection |= videoAdaptationSet.hasContentProtection();
}
if (audioAdaptationSetIndex != -1) {
audioAdaptationSet = period.adaptationSets.get(audioAdaptationSetIndex);
hasContentProtection |= audioAdaptationSet.hasContentProtection();
}
if (audioCapabilities != null) {
// If there are any passthrough audio encodings available, select the highest priority
// supported format (e.g. E-AC-3) and remove other tracks.
for (int i = 0; i < PASSTHROUGH_CODECS_PRIORITY.length; i++) {
String codec = PASSTHROUGH_CODECS_PRIORITY[i];
int encoding = PASSTHROUGH_ENCODINGS_PRIORITY[i];
if (codecs.indexOf(codec) == -1 || !audioCapabilities.supportsEncoding(encoding)) {
continue;
}
// Fail if we have neither video or audio.
if (videoAdaptationSet == null && audioAdaptationSet == null) {
player.onRenderersError(new IllegalStateException("No video or audio adaptation sets"));
return;
}
// Check drm support if necessary.
boolean filterHdContent = false;
StreamingDrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
player.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
return;
}
try {
drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
filterHdContent = videoAdaptationSet != null && videoAdaptationSet.hasContentProtection()
&& getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
} catch (UnsupportedDrmException e) {
player.onRenderersError(e);
return;
}
}
// Determine which video representations we should use for playback.
int[] videoRepresentationIndices = null;
if (videoAdaptationSet != null) {
try {
videoRepresentationIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, videoAdaptationSet.representations, null, filterHdContent);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoRepresentationIndices == null || videoRepresentationIndices.length == 0) {
videoRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
}
// Build the audio chunk sources.
List<ChunkSource> audioChunkSourceList = new ArrayList<>();
List<String> audioTrackNameList = new ArrayList<>();
if (audioAdaptationSet != null) {
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
List<Representation> audioRepresentations = audioAdaptationSet.representations;
List<String> codecs = new ArrayList<>();
for (int i = 0; i < audioRepresentations.size(); i++) {
Format format = audioRepresentations.get(i).format;
audioTrackNameList.add(format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player));
codecs.add(format.codecs);
}
if (audioCapabilities != null) {
// If there are any passthrough audio encodings available, select the highest priority
// supported format (e.g. E-AC-3) and remove other tracks.
for (int i = 0; i < PASSTHROUGH_CODECS_PRIORITY.length; i++) {
String codec = PASSTHROUGH_CODECS_PRIORITY[i];
int encoding = PASSTHROUGH_ENCODINGS_PRIORITY[i];
if (codecs.indexOf(codec) == -1 || !audioCapabilities.supportsEncoding(encoding)) {
continue;
}
for (int j = audioRepresentations.size() - 1; j >= 0; j--) {
if (!audioRepresentations.get(j).format.codecs.equals(codec)) {
audioTrackNameList.remove(j);
audioChunkSourceList.remove(j);
for (int j = audioRepresentations.size() - 1; j >= 0; j--) {
if (!audioRepresentations.get(j).format.codecs.equals(codec)) {
audioTrackNameList.remove(j);
audioChunkSourceList.remove(j);
}
}
break;
}
break;
}
}
}
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final TrackRenderer audioRenderer;
if (audioChunkSourceList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
audioTrackNames = new String[audioTrackNameList.size()];
audioTrackNameList.toArray(audioTrackNames);
audioChunkSource = new MultiTrackChunkSource(audioChunkSourceList);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final TrackRenderer audioRenderer;
if (audioChunkSourceList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
audioTrackNames = new String[audioTrackNameList.size()];
audioTrackNameList.toArray(audioTrackNames);
audioChunkSource = new MultiTrackChunkSource(audioChunkSourceList);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the text chunk sources.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator textEvaluator = new FormatEvaluator.FixedEvaluator();
List<ChunkSource> textChunkSourceList = new ArrayList<>();
List<String> textTrackNameList = new ArrayList<>();
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
if (adaptationSet.type == AdaptationSet.TYPE_TEXT) {
List<Representation> representations = adaptationSet.representations;
for (int j = 0; j < representations.size(); j++) {
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player));
// Build the text chunk sources.
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator textEvaluator = new FormatEvaluator.FixedEvaluator();
List<ChunkSource> textChunkSourceList = new ArrayList<>();
List<String> textTrackNameList = new ArrayList<>();
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
if (adaptationSet.type == AdaptationSet.TYPE_TEXT) {
List<Representation> representations = adaptationSet.representations;
for (int j = 0; j < representations.size(); j++) {
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player));
}
}
}
}
// Build the text renderers
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textChunkSourceList.isEmpty()) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textTrackNameList.size()];
textTrackNameList.toArray(textTrackNames);
textChunkSource = new MultiTrackChunkSource(textChunkSourceList);
SampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(textSampleSource, player, mainHandler.getLooper(),
new TtmlParser(), new WebvttParser());
// Build the text renderers
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textChunkSourceList.isEmpty()) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textTrackNameList.size()];
textTrackNameList.toArray(textTrackNames);
textChunkSource = new MultiTrackChunkSource(textChunkSourceList);
SampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(textSampleSource, player, mainHandler.getLooper(),
new TtmlParser(), new WebvttParser());
}
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
return securityLevelProperty.equals("L1") ? SECURITY_LEVEL_1 : securityLevelProperty
.equals("L3") ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
}
// Maps the Widevine "securityLevel" DRM property ("L1"/"L3") to the
// SECURITY_LEVEL_* constants; anything else maps to SECURITY_LEVEL_UNKNOWN.
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
return securityLevelProperty.equals("L1") ? SECURITY_LEVEL_1 : securityLevelProperty
.equals("L3") ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
}
}
......@@ -67,39 +67,20 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
*/
public interface RendererBuilder {
/**
* Constructs the necessary components for playback.
* Builds renderers for playback.
*
* @param player The parent player.
* @param callback The callback to invoke with the constructed components.
* @param player The player for which renderers are being built. {@link DemoPlayer#onRenderers}
* should be invoked once the renderers have been built. If building fails,
* {@link DemoPlayer#onRenderersError} should be invoked.
*/
void buildRenderers(DemoPlayer player, RendererBuilderCallback callback);
}
/**
* A callback invoked by a {@link RendererBuilder}.
*/
public interface RendererBuilderCallback {
void buildRenderers(DemoPlayer player);
/**
* Invoked with the results from a {@link RendererBuilder}.
*
* @param trackNames The names of the available tracks, indexed by {@link DemoPlayer} TYPE_*
* constants. May be null if the track names are unknown. An individual element may be null
* if the track names are unknown for the corresponding type.
* @param multiTrackSources Sources capable of switching between multiple available tracks,
* indexed by {@link DemoPlayer} TYPE_* constants. May be null if there are no types with
* multiple tracks. An individual element may be null if it does not have multiple tracks.
* @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual
* element may be null if there do not exist tracks of the corresponding type.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null.
* Cancels the current build operation, if there is one. Else does nothing.
* <p>
* A canceled build operation must not invoke {@link DemoPlayer#onRenderers} or
* {@link DemoPlayer#onRenderersError} on the player, which may have been released.
*/
void onRenderers(String[][] trackNames, MultiTrackChunkSource[] multiTrackSources,
TrackRenderer[] renderers, BandwidthMeter bandwidthMeter);
/**
* Invoked if a {@link RendererBuilder} encounters an error.
*
* @param e Describes the error.
*/
void onRenderersError(Exception e);
void cancel();
}
/**
......@@ -191,7 +172,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
private boolean lastReportedPlayWhenReady;
private Surface surface;
private InternalRendererBuilderCallback builderCallback;
private TrackRenderer videoRenderer;
private CodecCounters codecCounters;
private Format videoFormat;
......@@ -305,22 +285,31 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILT) {
player.stop();
}
if (builderCallback != null) {
builderCallback.cancel();
}
rendererBuilder.cancel();
videoFormat = null;
videoRenderer = null;
multiTrackSources = null;
rendererBuildingState = RENDERER_BUILDING_STATE_BUILDING;
maybeReportPlayerState();
builderCallback = new InternalRendererBuilderCallback();
rendererBuilder.buildRenderers(this, builderCallback);
rendererBuilder.buildRenderers(this);
}
/**
* Invoked with the results from a {@link RendererBuilder}.
*
* @param trackNames The names of the available tracks, indexed by {@link DemoPlayer} TYPE_*
* constants. May be null if the track names are unknown. An individual element may be null
* if the track names are unknown for the corresponding type.
* @param multiTrackSources Sources capable of switching between multiple available tracks,
* indexed by {@link DemoPlayer} TYPE_* constants. May be null if there are no types with
* multiple tracks. An individual element may be null if it does not have multiple tracks.
* @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual
* element may be null if there do not exist tracks of the corresponding type.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null.
*/
/* package */ void onRenderers(String[][] trackNames,
MultiTrackChunkSource[] multiTrackSources, TrackRenderer[] renderers,
BandwidthMeter bandwidthMeter) {
builderCallback = null;
// Normalize the results.
if (trackNames == null) {
trackNames = new String[RENDERER_COUNT][];
......@@ -357,8 +346,12 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
rendererBuildingState = RENDERER_BUILDING_STATE_BUILT;
}
/**
* Invoked if a {@link RendererBuilder} encounters an error.
*
* @param e Describes the error.
*/
/* package */ void onRenderersError(Exception e) {
builderCallback = null;
if (internalErrorListener != null) {
internalErrorListener.onRendererInitializationError(e);
}
......@@ -378,10 +371,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
public void release() {
if (builderCallback != null) {
builderCallback.cancel();
builderCallback = null;
}
rendererBuilder.cancel();
rendererBuildingState = RENDERER_BUILDING_STATE_IDLE;
surface = null;
player.release();
......@@ -390,14 +380,13 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
public int getPlaybackState() {
if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILDING) {
return ExoPlayer.STATE_PREPARING;
return STATE_PREPARING;
}
int playerState = player.getPlaybackState();
if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILT
&& rendererBuildingState == RENDERER_BUILDING_STATE_IDLE) {
if (rendererBuildingState == RENDERER_BUILDING_STATE_BUILT && playerState == STATE_IDLE) {
// This is an edge case where the renderers are built, but are still being passed to the
// player's playback thread.
return ExoPlayer.STATE_PREPARING;
return STATE_PREPARING;
}
return playerState;
}
......@@ -645,29 +634,4 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
}
// Legacy callback wrapper (removed by this commit): drops results and errors
// that arrive after cancel(), so a released DemoPlayer is never configured.
private class InternalRendererBuilderCallback implements RendererBuilderCallback {
// Set once cancel() is called; checked before forwarding any callback.
private boolean canceled;
public void cancel() {
canceled = true;
}
@Override
public void onRenderers(String[][] trackNames, MultiTrackChunkSource[] multiTrackSources,
TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
if (!canceled) {
DemoPlayer.this.onRenderers(trackNames, multiTrackSources, renderers, bandwidthMeter);
}
}
@Override
public void onRenderersError(Exception e) {
if (!canceled) {
DemoPlayer.this.onRenderersError(e);
}
}
}
}
......@@ -19,7 +19,6 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorSampleSource;
import com.google.android.exoplayer.text.TextTrackRenderer;
......@@ -52,7 +51,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
}
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
public void buildRenderers(DemoPlayer player) {
Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
// Build the video and audio renderers.
......@@ -74,7 +73,12 @@ public class ExtractorRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
callback.onRenderers(null, null, renderers, bandwidthMeter);
player.onRenderers(null, null, renderers, bandwidthMeter);
}
@Override
public void cancel() {
// Do nothing. buildRenderers completes synchronously (it invokes
// player.onRenderers directly), so there is never an in-flight build to cancel.
}
}
......@@ -24,7 +24,6 @@ import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.hls.HlsChunkSource;
import com.google.android.exoplayer.hls.HlsMasterPlaylist;
import com.google.android.exoplayer.hls.HlsPlaylist;
......@@ -50,7 +49,7 @@ import java.util.Map;
/**
* A {@link RendererBuilder} for HLS.
*/
public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<HlsPlaylist> {
public class HlsRendererBuilder implements RendererBuilder {
private static final int BUFFER_SEGMENT_SIZE = 256 * 1024;
private static final int BUFFER_SEGMENTS = 64;
......@@ -60,8 +59,7 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
private final String url;
private final AudioCapabilities audioCapabilities;
private DemoPlayer player;
private RendererBuilderCallback callback;
private AsyncRendererBuilder currentAsyncBuilder;
public HlsRendererBuilder(Context context, String userAgent, String url,
AudioCapabilities audioCapabilities) {
......@@ -72,59 +70,103 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
}
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
this.player = player;
this.callback = callback;
HlsPlaylistParser parser = new HlsPlaylistParser();
ManifestFetcher<HlsPlaylist> playlistFetcher = new ManifestFetcher<>(url,
new DefaultUriDataSource(context, userAgent), parser);
playlistFetcher.singleLoad(player.getMainHandler().getLooper(), this);
public void buildRenderers(DemoPlayer player) {
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, audioCapabilities,
player);
currentAsyncBuilder.init();
}
@Override
public void onSingleManifestError(IOException e) {
callback.onRenderersError(e);
public void cancel() {
if (currentAsyncBuilder != null) {
currentAsyncBuilder.cancel();
currentAsyncBuilder = null;
}
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
int[] variantIndices = null;
if (manifest instanceof HlsMasterPlaylist) {
HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
try {
variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, masterPlaylist.variants, null, false);
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
private static final class AsyncRendererBuilder implements ManifestCallback<HlsPlaylist> {
private final Context context;
private final String userAgent;
private final String url;
private final AudioCapabilities audioCapabilities;
private final DemoPlayer player;
private final ManifestFetcher<HlsPlaylist> playlistFetcher;
private boolean canceled;
public AsyncRendererBuilder(Context context, String userAgent, String url,
AudioCapabilities audioCapabilities, DemoPlayer player) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.audioCapabilities = audioCapabilities;
this.player = player;
HlsPlaylistParser parser = new HlsPlaylistParser();
playlistFetcher = new ManifestFetcher<>(url, new DefaultUriDataSource(context, userAgent),
parser);
}
public void init() {
playlistFetcher.singleLoad(player.getMainHandler().getLooper(), this);
}
public void cancel() {
canceled = true;
}
@Override
public void onSingleManifestError(IOException e) {
if (canceled) {
return;
}
player.onRenderersError(e);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
if (canceled) {
return;
}
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
int[] variantIndices = null;
if (manifest instanceof HlsMasterPlaylist) {
HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
try {
variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, masterPlaylist.variants, null, false);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
sampleSource, new Id3Parser(), player, mainHandler.getLooper());
Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
mainHandler.getLooper());
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
player.onRenderers(null, null, renderers, bandwidthMeter);
}
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
MetadataTrackRenderer<Map<String, Object>> id3Renderer =
new MetadataTrackRenderer<>(sampleSource, new Id3Parser(), player, mainHandler.getLooper());
Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
mainHandler.getLooper());
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
callback.onRenderers(null, null, renderers, bandwidthMeter);
}
}
......@@ -28,7 +28,6 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
......@@ -57,8 +56,7 @@ import java.util.Arrays;
/**
* A {@link RendererBuilder} for SmoothStreaming.
*/
public class SmoothStreamingRendererBuilder implements RendererBuilder,
ManifestFetcher.ManifestCallback<SmoothStreamingManifest> {
public class SmoothStreamingRendererBuilder implements RendererBuilder {
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
private static final int VIDEO_BUFFER_SEGMENTS = 200;
......@@ -71,179 +69,218 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
private final String url;
private final MediaDrmCallback drmCallback;
private DemoPlayer player;
private RendererBuilderCallback callback;
private ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private AsyncRendererBuilder currentAsyncBuilder;
public SmoothStreamingRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.url = Util.toLowerInvariant(url).endsWith("/manifest") ? url : url + "/Manifest";
this.drmCallback = drmCallback;
}
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
this.player = player;
this.callback = callback;
String manifestUrl = url;
if (!manifestUrl.endsWith("/Manifest")) {
manifestUrl += "/Manifest";
}
SmoothStreamingManifestParser parser = new SmoothStreamingManifestParser();
manifestFetcher = new ManifestFetcher<>(manifestUrl,
new DefaultHttpDataSource(userAgent, null), parser);
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
public void buildRenderers(DemoPlayer player) {
currentAsyncBuilder = new AsyncRendererBuilder(context, userAgent, url, drmCallback, player);
currentAsyncBuilder.init();
}
@Override
public void onSingleManifestError(IOException exception) {
callback.onRenderersError(exception);
public void cancel() {
if (currentAsyncBuilder != null) {
currentAsyncBuilder.cancel();
currentAsyncBuilder = null;
}
}
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (manifest.protectionElement != null) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
return;
}
try {
drmSessionManager = new StreamingDrmSessionManager(manifest.protectionElement.uuid,
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
} catch (UnsupportedDrmException e) {
callback.onRenderersError(e);
return;
}
private static final class AsyncRendererBuilder
implements ManifestFetcher.ManifestCallback<SmoothStreamingManifest> {
private final Context context;
private final String userAgent;
private final MediaDrmCallback drmCallback;
private final DemoPlayer player;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private boolean canceled;
public AsyncRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, DemoPlayer player) {
this.context = context;
this.userAgent = userAgent;
this.drmCallback = drmCallback;
this.player = player;
SmoothStreamingManifestParser parser = new SmoothStreamingManifestParser();
manifestFetcher = new ManifestFetcher<>(url, new DefaultHttpDataSource(userAgent, null),
parser);
}
// Obtain stream elements for playback.
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioStreamElementCount++;
} else if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textStreamElementCount++;
} else if (videoStreamElementIndex == -1
&& manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
videoStreamElementIndex = i;
}
public void init() {
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
}
public void cancel() {
canceled = true;
}
// Determine which video tracks we should use for playback.
int[] videoTrackIndices = null;
if (videoStreamElementIndex != -1) {
try {
videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
Arrays.asList(manifest.streamElements[videoStreamElementIndex].tracks), null, false);
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
@Override
public void onSingleManifestError(IOException exception) {
if (canceled) {
return;
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoTrackIndices == null || videoTrackIndices.length == 0) {
videoRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
player.onRenderersError(exception);
}
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioStreamElementCount == 0) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
audioTrackNames = new String[audioStreamElementCount];
ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
audioStreamElementCount = 0;
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
if (canceled) {
return;
}
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Check drm support if necessary.
DrmSessionManager drmSessionManager = null;
if (manifest.protectionElement != null) {
if (Util.SDK_INT < 18) {
player.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
return;
}
try {
drmSessionManager = new StreamingDrmSessionManager(manifest.protectionElement.uuid,
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
} catch (UnsupportedDrmException e) {
player.onRenderersError(e);
return;
}
}
// Obtain stream elements for playback.
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
audioStreamElementCount++;
} else if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textStreamElementCount++;
} else if (videoStreamElementIndex == -1
&& manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
videoStreamElementIndex = i;
}
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
// Build the text renderer.
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textStreamElementCount == 0) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textStreamElementCount];
ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
DataSource ttmlDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
textStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(manifestFetcher,
i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator, LIVE_EDGE_LATENCY_MS);
textStreamElementCount++;
// Determine which video tracks we should use for playback.
int[] videoTrackIndices = null;
if (videoStreamElementIndex != -1) {
try {
videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
Arrays.asList(manifest.streamElements[videoStreamElementIndex].tracks), null, false);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoTrackIndices == null || videoTrackIndices.length == 0) {
videoRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
}
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioStreamElementCount == 0) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
audioTrackNames = new String[audioStreamElementCount];
ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
audioStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
audioStreamElementCount++;
}
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player);
}
textChunkSource = new MultiTrackChunkSource(textChunkSources);
ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(ttmlSampleSource, player, mainHandler.getLooper(),
new TtmlParser());
// Build the text renderer.
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textStreamElementCount == 0) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textStreamElementCount];
ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
DataSource ttmlDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
textStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
textStreamElementCount++;
}
}
textChunkSource = new MultiTrackChunkSource(textChunkSources);
ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(ttmlSampleSource, player, mainHandler.getLooper(),
new TtmlParser());
}
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment