Commit ce03b8c2 by andrewlewis; committed by Oliver Woodman

Move AudioTrack initialization into handleBuffer.

Move initialization code from the audio renderers into handleBuffer. The audio
session id and tunneling enabled state can be altered using new setters.

This change is not intended to change the behavior of the audio renderers.
Before this change, the AudioTrack would always be reinitialized by the
renderers before calling handleBuffer.

Add a protected onAudioTrackUnderrun method for custom renderers that want to
implement custom handling of underruns.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=144067149
parent 5c89bbed
...@@ -38,21 +38,21 @@ import java.nio.ByteOrder; ...@@ -38,21 +38,21 @@ import java.nio.ByteOrder;
* playback position smoothing, non-blocking writes and reconfiguration. * playback position smoothing, non-blocking writes and reconfiguration.
* <p> * <p>
* Before starting playback, specify the input format by calling * Before starting playback, specify the input format by calling
* {@link #configure(String, int, int, int, int)}. Next call {@link #initialize(int)} or * {@link #configure(String, int, int, int, int)}. Optionally call {@link #setAudioSessionId(int)},
* {@link #initializeV21(int, boolean)}, optionally specifying an audio session and whether the * {@link #setTunnelingEnabledV21(boolean)} and {@link #setStreamType(int)} to configure audio
* track is to be used with tunneling video playback. * playback. These methods may be called after writing data to the track, in which case it will be
* reinitialized as required.
* <p> * <p>
* Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()} * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
* when the data being fed is discontinuous. Call {@link #play()} to start playing the written data. * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
* <p> * <p>
* Call {@link #configure(String, int, int, int, int)} whenever the input format changes. If * Call {@link #configure(String, int, int, int, int)} whenever the input format changes. The track
* {@link #isInitialized()} returns {@code false} after the call, it is necessary to call * will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
* {@link #initialize(int)} or {@link #initializeV21(int, boolean)} before writing more data.
* <p> * <p>
* The underlying {@link android.media.AudioTrack} is created by {@link #initialize(int)} and * Calling {@link #reset()} releases the underlying {@link android.media.AudioTrack} (and so does
* released by {@link #reset()} (and {@link #configure(String, int, int, int, int)} unless the input * calling {@link #configure(String, int, int, int, int)} unless the format is unchanged). It is
* format is unchanged). It is safe to call {@link #initialize(int)} or * safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()} without calling
* {@link #initializeV21(int, boolean)} after calling {@link #reset()} without reconfiguration. * {@link #configure(String, int, int, int, int)}.
* <p> * <p>
* Call {@link #release()} when the instance is no longer required. * Call {@link #release()} when the instance is no longer required.
*/ */
...@@ -74,6 +74,13 @@ public final class AudioTrack { ...@@ -74,6 +74,13 @@ public final class AudioTrack {
*/ */
void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs); void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
/**
* Called when the audio track has been initialized with the specified {@code audioSessionId}.
*
* @param audioSessionId The audio session id.
*/
void onAudioSessionId(int audioSessionId);
} }
/** /**
...@@ -253,7 +260,7 @@ public final class AudioTrack { ...@@ -253,7 +260,7 @@ public final class AudioTrack {
private final AudioTrackUtil audioTrackUtil; private final AudioTrackUtil audioTrackUtil;
/** /**
* Used to keep the audio session active on pre-V21 builds (see {@link #initialize(int)}). * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
*/ */
private android.media.AudioTrack keepSessionIdAudioTrack; private android.media.AudioTrack keepSessionIdAudioTrack;
...@@ -299,6 +306,9 @@ public final class AudioTrack { ...@@ -299,6 +306,9 @@ public final class AudioTrack {
private ByteBuffer resampledBuffer; private ByteBuffer resampledBuffer;
private boolean useResampledBuffer; private boolean useResampledBuffer;
private boolean playing;
private int audioSessionId;
private boolean tunneling;
private boolean hasData; private boolean hasData;
private long lastFeedElapsedRealtimeMs; private long lastFeedElapsedRealtimeMs;
...@@ -329,6 +339,7 @@ public final class AudioTrack { ...@@ -329,6 +339,7 @@ public final class AudioTrack {
volume = 1.0f; volume = 1.0f;
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT; streamType = C.STREAM_TYPE_DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
} }
/** /**
...@@ -343,14 +354,6 @@ public final class AudioTrack { ...@@ -343,14 +354,6 @@ public final class AudioTrack {
} }
/** /**
* Returns whether the audio track has been successfully initialized via {@link #initialize} or
* {@link #initializeV21(int, boolean)}, and has not yet been {@link #reset}.
*/
public boolean isInitialized() {
return audioTrack != null;
}
/**
* Returns the playback position in the stream starting at zero, in microseconds, or * Returns the playback position in the stream starting at zero, in microseconds, or
* {@link #CURRENT_POSITION_NOT_SET} if it is not yet available. * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
* *
...@@ -512,31 +515,7 @@ public final class AudioTrack { ...@@ -512,31 +515,7 @@ public final class AudioTrack {
bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(pcmBytesToFrames(bufferSize)); bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(pcmBytesToFrames(bufferSize));
} }
/** private void initialize() throws InitializationException {
* Initializes the audio track for writing new buffers using {@link #handleBuffer}.
*
* @param sessionId Audio track session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} to create
* one.
* @return The audio track session identifier.
*/
public int initialize(int sessionId) throws InitializationException {
return initializeInternal(sessionId, false);
}
/**
* Initializes the audio track for writing new buffers using {@link #handleBuffer}.
*
* @param sessionId Audio track session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} to create
* one.
* @param tunneling Whether the audio track is to be used with tunneling video playback.
* @return The audio track session identifier.
*/
public int initializeV21(int sessionId, boolean tunneling) throws InitializationException {
Assertions.checkState(Util.SDK_INT >= 21);
return initializeInternal(sessionId, tunneling);
}
private int initializeInternal(int sessionId, boolean tunneling) throws InitializationException {
// If we're asynchronously releasing a previous audio track then we block until it has been // If we're asynchronously releasing a previous audio track then we block until it has been
// released. This guarantees that we cannot end up in a state where we have multiple audio // released. This guarantees that we cannot end up in a state where we have multiple audio
// track instances. Without this guarantee it would be possible, in extreme cases, to exhaust // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
...@@ -547,24 +526,24 @@ public final class AudioTrack { ...@@ -547,24 +526,24 @@ public final class AudioTrack {
useHwAvSync = tunneling; useHwAvSync = tunneling;
if (useHwAvSync) { if (useHwAvSync) {
audioTrack = createHwAvSyncAudioTrackV21(sampleRate, channelConfig, targetEncoding, audioTrack = createHwAvSyncAudioTrackV21(sampleRate, channelConfig, targetEncoding,
bufferSize, sessionId); bufferSize, audioSessionId);
} else if (sessionId == C.AUDIO_SESSION_ID_UNSET) { } else if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig, audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
targetEncoding, bufferSize, MODE_STREAM); targetEncoding, bufferSize, MODE_STREAM);
} else { } else {
// Re-attach to the same audio session. // Re-attach to the same audio session.
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig, audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
targetEncoding, bufferSize, MODE_STREAM, sessionId); targetEncoding, bufferSize, MODE_STREAM, audioSessionId);
} }
checkAudioTrackInitialized(); checkAudioTrackInitialized();
sessionId = audioTrack.getAudioSessionId(); int audioSessionId = audioTrack.getAudioSessionId();
if (enablePreV21AudioSessionWorkaround) { if (enablePreV21AudioSessionWorkaround) {
if (Util.SDK_INT < 21) { if (Util.SDK_INT < 21) {
// The workaround creates an audio track with a two byte buffer on the same session, and // The workaround creates an audio track with a two byte buffer on the same session, and
// does not release it until this object is released, which keeps the session active. // does not release it until this object is released, which keeps the session active.
if (keepSessionIdAudioTrack != null if (keepSessionIdAudioTrack != null
&& sessionId != keepSessionIdAudioTrack.getAudioSessionId()) { && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
releaseKeepSessionIdAudioTrack(); releaseKeepSessionIdAudioTrack();
} }
if (keepSessionIdAudioTrack == null) { if (keepSessionIdAudioTrack == null) {
...@@ -573,21 +552,25 @@ public final class AudioTrack { ...@@ -573,21 +552,25 @@ public final class AudioTrack {
@C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT; @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback. int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
keepSessionIdAudioTrack = new android.media.AudioTrack(streamType, sampleRate, keepSessionIdAudioTrack = new android.media.AudioTrack(streamType, sampleRate,
channelConfig, encoding, bufferSize, MODE_STATIC, sessionId); channelConfig, encoding, bufferSize, MODE_STATIC, audioSessionId);
} }
} }
} }
if (this.audioSessionId != audioSessionId) {
this.audioSessionId = audioSessionId;
listener.onAudioSessionId(audioSessionId);
}
audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds()); audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
setAudioTrackVolume(); setVolumeInternal();
hasData = false; hasData = false;
return sessionId;
} }
/** /**
* Starts or resumes playing audio if the audio track has been initialized. * Starts or resumes playing audio if the audio track has been initialized.
*/ */
public void play() { public void play() {
playing = true;
if (isInitialized()) { if (isInitialized()) {
resumeSystemTimeUs = System.nanoTime() / 1000; resumeSystemTimeUs = System.nanoTime() / 1000;
audioTrack.play(); audioTrack.play();
...@@ -622,9 +605,18 @@ public final class AudioTrack { ...@@ -622,9 +605,18 @@ public final class AudioTrack {
* @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and * @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
* {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously * {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
* written data. * written data.
* @throws InitializationException If an error occurs initializing the track.
* @throws WriteException If an error occurs writing the audio data. * @throws WriteException If an error occurs writing the audio data.
*/ */
public int handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException { public int handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException {
if (!isInitialized()) {
initialize();
if (playing) {
play();
}
}
boolean hadData = hasData; boolean hadData = hasData;
hasData = hasPendingData(); hasData = hasPendingData();
if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) { if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) {
...@@ -785,28 +777,52 @@ public final class AudioTrack { ...@@ -785,28 +777,52 @@ public final class AudioTrack {
/** /**
* Sets the stream type for audio track. If the stream type has changed and if the audio track * Sets the stream type for audio track. If the stream type has changed and if the audio track
* is not configured for use with video tunneling, then the audio track is reset and the caller * is not configured for use with video tunneling, then the audio track is reset and will be
* must re-initialize the audio track before writing more data. The caller must not reuse the * reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}. An audio session
* audio session identifier when re-initializing with a new stream type. * cannot be reused after a change of stream type, so the audio session identifier will be reset.
* <p> * <p>
* If the audio track is configured for use with video tunneling then the stream type is ignored * If the audio track is configured for use with video tunneling then the stream type is ignored
* and the audio track is not reset. The passed stream type will be used if the audio track is * and the audio track is not reset. The passed stream type will be used if the audio track is
* later re-configured into non-tunneled mode. * later re-configured into non-tunneled mode.
* *
* @param streamType The {@link C.StreamType} to use for audio output. * @param streamType The {@link C.StreamType} to use for audio output.
* @return Whether the audio track was reset as a result of this call.
*/ */
public boolean setStreamType(@C.StreamType int streamType) { public void setStreamType(@C.StreamType int streamType) {
if (this.streamType == streamType) { if (this.streamType == streamType) {
return false; return;
} }
this.streamType = streamType; this.streamType = streamType;
if (useHwAvSync) { if (useHwAvSync) {
// The stream type is ignored in tunneling mode, so no need to reset. // The stream type is ignored in tunneling mode, so no need to reset.
return false; return;
} }
reset(); reset();
return true; audioSessionId = C.AUDIO_SESSION_ID_UNSET;
}
/**
* Sets the audio session id, and resets the audio track if the audio session id has changed.
*/
public void setAudioSessionId(int audioSessionId) {
if (this.audioSessionId != audioSessionId) {
this.audioSessionId = audioSessionId;
reset();
}
}
/**
* Sets whether tunneling is enabled. Enabling tunneling requires platform API version 21 onwards.
* Resets the audio track if tunneling was enabled/disabled.
*
* @param tunneling Whether the audio track will be used with tunneling video playback.
* @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21.
*/
public void setTunnelingEnabledV21(boolean tunneling) {
if (this.tunneling != tunneling) {
Assertions.checkState(Util.SDK_INT >= 21);
this.tunneling = tunneling;
reset();
}
} }
/** /**
...@@ -817,17 +833,17 @@ public final class AudioTrack { ...@@ -817,17 +833,17 @@ public final class AudioTrack {
public void setVolume(float volume) { public void setVolume(float volume) {
if (this.volume != volume) { if (this.volume != volume) {
this.volume = volume; this.volume = volume;
setAudioTrackVolume(); setVolumeInternal();
} }
} }
private void setAudioTrackVolume() { private void setVolumeInternal() {
if (!isInitialized()) { if (!isInitialized()) {
// Do nothing. // Do nothing.
} else if (Util.SDK_INT >= 21) { } else if (Util.SDK_INT >= 21) {
setAudioTrackVolumeV21(audioTrack, volume); setVolumeInternalV21(audioTrack, volume);
} else { } else {
setAudioTrackVolumeV3(audioTrack, volume); setVolumeInternalV3(audioTrack, volume);
} }
} }
...@@ -835,6 +851,7 @@ public final class AudioTrack { ...@@ -835,6 +851,7 @@ public final class AudioTrack {
* Pauses playback. * Pauses playback.
*/ */
public void pause() { public void pause() {
playing = false;
if (isInitialized()) { if (isInitialized()) {
resetSyncParams(); resetSyncParams();
audioTrackUtil.pause(); audioTrackUtil.pause();
...@@ -844,9 +861,9 @@ public final class AudioTrack { ...@@ -844,9 +861,9 @@ public final class AudioTrack {
/** /**
* Releases the underlying audio track asynchronously. * Releases the underlying audio track asynchronously.
* <p> * <p>
* Calling {@link #initialize(int)} or {@link #initializeV21(int, boolean)} will block until the * Calling {@link #handleBuffer(ByteBuffer, long)} will block until the audio track has been
* audio track has been released, so it is safe to initialize immediately after a reset. The audio * released, so it is safe to use the audio track immediately after a reset. The audio session may
* session may remain active until {@link #release()} is called. * remain active until {@link #release()} is called.
*/ */
public void reset() { public void reset() {
if (isInitialized()) { if (isInitialized()) {
...@@ -887,6 +904,8 @@ public final class AudioTrack { ...@@ -887,6 +904,8 @@ public final class AudioTrack {
public void release() { public void release() {
reset(); reset();
releaseKeepSessionIdAudioTrack(); releaseKeepSessionIdAudioTrack();
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
playing = false;
} }
/** /**
...@@ -1024,6 +1043,10 @@ public final class AudioTrack { ...@@ -1024,6 +1043,10 @@ public final class AudioTrack {
throw new InitializationException(state, sampleRate, channelConfig, bufferSize); throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
} }
private boolean isInitialized() {
return audioTrack != null;
}
private long pcmBytesToFrames(long byteCount) { private long pcmBytesToFrames(long byteCount) {
return byteCount / pcmFrameSize; return byteCount / pcmFrameSize;
} }
...@@ -1240,12 +1263,12 @@ public final class AudioTrack { ...@@ -1240,12 +1263,12 @@ public final class AudioTrack {
} }
@TargetApi(21) @TargetApi(21)
private static void setAudioTrackVolumeV21(android.media.AudioTrack audioTrack, float volume) { private static void setVolumeInternalV21(android.media.AudioTrack audioTrack, float volume) {
audioTrack.setVolume(volume); audioTrack.setVolume(volume);
} }
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
private static void setAudioTrackVolumeV3(android.media.AudioTrack audioTrack, float volume) { private static void setVolumeInternalV3(android.media.AudioTrack audioTrack, float volume) {
audioTrack.setStereoVolume(volume, volume); audioTrack.setStereoVolume(volume, volume);
} }
...@@ -1494,7 +1517,7 @@ public final class AudioTrack { ...@@ -1494,7 +1517,7 @@ public final class AudioTrack {
playbackParams = (playbackParams != null ? playbackParams : new PlaybackParams()) playbackParams = (playbackParams != null ? playbackParams : new PlaybackParams())
.allowDefaults(); .allowDefaults();
this.playbackParams = playbackParams; this.playbackParams = playbackParams;
this.playbackSpeed = playbackParams.getSpeed(); playbackSpeed = playbackParams.getSpeed();
maybeApplyPlaybackParams(); maybeApplyPlaybackParams();
} }
......
...@@ -41,8 +41,7 @@ import java.nio.ByteBuffer; ...@@ -41,8 +41,7 @@ import java.nio.ByteBuffer;
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}. * Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
*/ */
@TargetApi(16) @TargetApi(16)
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock, public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
AudioTrack.Listener {
private final EventDispatcher eventDispatcher; private final EventDispatcher eventDispatcher;
private final AudioTrack audioTrack; private final AudioTrack audioTrack;
...@@ -50,7 +49,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -50,7 +49,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private boolean passthroughEnabled; private boolean passthroughEnabled;
private android.media.MediaFormat passthroughMediaFormat; private android.media.MediaFormat passthroughMediaFormat;
private int pcmEncoding; private int pcmEncoding;
private int audioSessionId;
private long currentPositionUs; private long currentPositionUs;
private boolean allowPositionDiscontinuity; private boolean allowPositionDiscontinuity;
...@@ -129,8 +127,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -129,8 +127,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
boolean playClearSamplesWithoutKeys, Handler eventHandler, boolean playClearSamplesWithoutKeys, Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) { AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys); super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
audioSessionId = C.AUDIO_SESSION_ID_UNSET; audioTrack = new AudioTrack(audioCapabilities, new AudioTrackListener());
audioTrack = new AudioTrack(audioCapabilities, this);
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
} }
...@@ -246,6 +243,20 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -246,6 +243,20 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
// Do nothing. // Do nothing.
} }
/**
* Called when an {@link AudioTrack} underrun occurs.
*
* @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
* configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
* as the buffered media can have a variable bitrate so the duration may be unknown.
* @param elapsedSinceLastFeedMs The time since the {@link AudioTrack} was last fed data.
*/
protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
// Do nothing.
}
@Override @Override
protected void onEnabled(boolean joining) throws ExoPlaybackException { protected void onEnabled(boolean joining) throws ExoPlaybackException {
super.onEnabled(joining); super.onEnabled(joining);
...@@ -274,7 +285,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -274,7 +285,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override @Override
protected void onDisabled() { protected void onDisabled() {
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
try { try {
audioTrack.release(); audioTrack.release();
} finally { } finally {
...@@ -325,28 +335,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -325,28 +335,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return true; return true;
} }
if (!audioTrack.isInitialized()) {
// Initialize the AudioTrack now.
try {
if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
audioSessionId = audioTrack.initialize(C.AUDIO_SESSION_ID_UNSET);
eventDispatcher.audioSessionId(audioSessionId);
onAudioSessionId(audioSessionId);
} else {
audioTrack.initialize(audioSessionId);
}
} catch (AudioTrack.InitializationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
if (getState() == STATE_STARTED) {
audioTrack.play();
}
}
int handleBufferResult; int handleBufferResult;
try { try {
handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs); handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs);
} catch (AudioTrack.WriteException e) { } catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); throw ExoPlaybackException.createForRenderer(e, getIndex());
} }
...@@ -386,9 +378,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -386,9 +378,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
break; break;
case C.MSG_SET_STREAM_TYPE: case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message; @C.StreamType int streamType = (Integer) message;
if (audioTrack.setStreamType(streamType)) { audioTrack.setStreamType(streamType);
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
}
break; break;
default: default:
super.handleMessage(messageType, message); super.handleMessage(messageType, message);
...@@ -396,11 +386,21 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -396,11 +386,21 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
} }
// AudioTrack.Listener implementation. private final class AudioTrackListener implements AudioTrack.Listener {
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
MediaCodecAudioRenderer.this.onAudioTrackUnderrun(bufferSize, bufferSizeMs,
elapsedSinceLastFeedMs);
}
@Override
public void onAudioSessionId(int audioSessionId) {
eventDispatcher.audioSessionId(audioSessionId);
MediaCodecAudioRenderer.this.onAudioSessionId(audioSessionId);
}
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
} }
} }
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import android.media.PlaybackParams; import android.media.PlaybackParams;
import android.media.audiofx.Virtualizer;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import android.os.SystemClock; import android.os.SystemClock;
...@@ -43,8 +44,7 @@ import java.lang.annotation.RetentionPolicy; ...@@ -43,8 +44,7 @@ import java.lang.annotation.RetentionPolicy;
/** /**
* Decodes and renders audio using a {@link SimpleDecoder}. * Decodes and renders audio using a {@link SimpleDecoder}.
*/ */
public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock, public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {
AudioTrack.Listener {
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@IntDef({REINITIALIZATION_STATE_NONE, REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM, @IntDef({REINITIALIZATION_STATE_NONE, REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM,
...@@ -94,8 +94,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -94,8 +94,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private boolean outputStreamEnded; private boolean outputStreamEnded;
private boolean waitingForKeys; private boolean waitingForKeys;
private int audioSessionId;
public SimpleDecoderAudioRenderer() { public SimpleDecoderAudioRenderer() {
this(null, null); this(null, null);
} }
...@@ -141,11 +139,10 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -141,11 +139,10 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys) { DrmSessionManager<ExoMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys) {
super(C.TRACK_TYPE_AUDIO); super(C.TRACK_TYPE_AUDIO);
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioTrack = new AudioTrack(audioCapabilities, this); audioTrack = new AudioTrack(audioCapabilities, new AudioTrackListener());
this.drmSessionManager = drmSessionManager; this.drmSessionManager = drmSessionManager;
formatHolder = new FormatHolder(); formatHolder = new FormatHolder();
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys; this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
decoderReinitializationState = REINITIALIZATION_STATE_NONE; decoderReinitializationState = REINITIALIZATION_STATE_NONE;
audioTrackNeedsConfigure = true; audioTrackNeedsConfigure = true;
} }
...@@ -186,6 +183,36 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -186,6 +183,36 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
/** /**
* Called when the audio session id becomes known. Once the id is known it will not change (and
* hence this method will not be called again) unless the renderer is disabled and then
* subsequently re-enabled.
* <p>
* The default implementation is a no-op. One reason for overriding this method would be to
* instantiate and enable a {@link Virtualizer} in order to spatialize the audio channels. For
* this use case, any {@link Virtualizer} instances should be released in {@link #onDisabled()}
* (if not before).
*
* @param audioSessionId The audio session id.
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
/**
* Called when an {@link AudioTrack} underrun occurs.
*
* @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
* @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
* configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
* as the buffered media can have a variable bitrate so the duration may be unknown.
* @param elapsedSinceLastFeedMs The time since the {@link AudioTrack} was last fed data.
*/
protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
// Do nothing.
}
/**
* Creates a decoder for the given format. * Creates a decoder for the given format.
* *
* @param format The format for which a decoder is required. * @param format The format for which a decoder is required.
...@@ -244,19 +271,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -244,19 +271,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
audioTrackNeedsConfigure = false; audioTrackNeedsConfigure = false;
} }
if (!audioTrack.isInitialized()) {
if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
audioSessionId = audioTrack.initialize(C.AUDIO_SESSION_ID_UNSET);
eventDispatcher.audioSessionId(audioSessionId);
onAudioSessionId(audioSessionId);
} else {
audioTrack.initialize(audioSessionId);
}
if (getState() == STATE_STARTED) {
audioTrack.play();
}
}
int handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs); int handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs);
// If we are out of sync, allow currentPositionUs to jump backwards. // If we are out of sync, allow currentPositionUs to jump backwards.
...@@ -381,19 +395,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -381,19 +395,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return currentPositionUs; return currentPositionUs;
} }
/**
* Called when the audio session id becomes known. Once the id is known it will not change (and
* hence this method will not be called again) unless the renderer is disabled and then
* subsequently re-enabled.
* <p>
* The default implementation is a no-op.
*
* @param audioSessionId The audio session id.
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
@Override @Override
protected void onEnabled(boolean joining) throws ExoPlaybackException { protected void onEnabled(boolean joining) throws ExoPlaybackException {
decoderCounters = new DecoderCounters(); decoderCounters = new DecoderCounters();
...@@ -425,7 +426,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -425,7 +426,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override @Override
protected void onDisabled() { protected void onDisabled() {
inputFormat = null; inputFormat = null;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
audioTrackNeedsConfigure = true; audioTrackNeedsConfigure = true;
waitingForKeys = false; waitingForKeys = false;
try { try {
...@@ -553,9 +553,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -553,9 +553,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
break; break;
case C.MSG_SET_STREAM_TYPE: case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message; @C.StreamType int streamType = (Integer) message;
if (audioTrack.setStreamType(streamType)) { audioTrack.setStreamType(streamType);
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
}
break; break;
default: default:
super.handleMessage(messageType, message); super.handleMessage(messageType, message);
...@@ -563,11 +561,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements ...@@ -563,11 +561,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
} }
// AudioTrack.Listener implementation. private final class AudioTrackListener implements AudioTrack.Listener {
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
SimpleDecoderAudioRenderer.this.onAudioTrackUnderrun(bufferSize, bufferSizeMs,
elapsedSinceLastFeedMs);
}
@Override
public void onAudioSessionId(int audioSessionId) {
eventDispatcher.audioSessionId(audioSessionId);
SimpleDecoderAudioRenderer.this.onAudioSessionId(audioSessionId);
}
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
} }
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment