Commit 501f4642 by andrewlewis Committed by kim-vde

Add support for using framework speed adjustment

AudioTrack.setPlaybackParams can be used to adjust playback speed.
From API 23, this is preferable to application-level speed adjustment
(currently implemented in ExoPlayer) because the speed change
occurs in the mixer, which means that the audio track buffer doesn't
need to drain out before the speed adjustment takes effect.

Issue: #7502
PiperOrigin-RevId: 326392301
parent b95c984d
...@@ -122,6 +122,9 @@ ...@@ -122,6 +122,9 @@
* Add floating point PCM output capability in `MediaCodecAudioRenderer`, * Add floating point PCM output capability in `MediaCodecAudioRenderer`,
and `LibopusAudioRenderer`. and `LibopusAudioRenderer`.
* Do not use a MediaCodec for PCM formats if AudioTrack supports it. * Do not use a MediaCodec for PCM formats if AudioTrack supports it.
* Add optional support for using framework audio speed adjustment instead
of application-level audio speed adjustment
([#7502](https://github.com/google/ExoPlayer/issues/7502)).
* Text: * Text:
* Recreate the decoder when handling and swallowing decode errors in * Recreate the decoder when handling and swallowing decode errors in
`TextRenderer`. This fixes a case where playback would never end when `TextRenderer`. This fixes a case where playback would never end when
......
...@@ -17,6 +17,7 @@ package com.google.android.exoplayer2; ...@@ -17,6 +17,7 @@ package com.google.android.exoplayer2;
import android.content.Context; import android.content.Context;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
...@@ -94,6 +95,7 @@ public class DefaultRenderersFactory implements RenderersFactory { ...@@ -94,6 +95,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
private @MediaCodecRenderer.MediaCodecOperationMode int audioMediaCodecOperationMode; private @MediaCodecRenderer.MediaCodecOperationMode int audioMediaCodecOperationMode;
private @MediaCodecRenderer.MediaCodecOperationMode int videoMediaCodecOperationMode; private @MediaCodecRenderer.MediaCodecOperationMode int videoMediaCodecOperationMode;
private boolean enableFloatOutput; private boolean enableFloatOutput;
private boolean enableAudioTrackPlaybackParams;
private boolean enableOffload; private boolean enableOffload;
/** @param context A {@link Context}. */ /** @param context A {@link Context}. */
...@@ -259,6 +261,30 @@ public class DefaultRenderersFactory implements RenderersFactory { ...@@ -259,6 +261,30 @@ public class DefaultRenderersFactory implements RenderersFactory {
} }
/** /**
* Sets whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, which is supported from API level
* 23, rather than using application-level audio speed adjustment. This setting has no effect on
* builds before API level 23 (application-level speed adjustment will be used in all cases).
*
* <p>If enabled and supported, new playback speed settings will take effect more quickly because
* they are applied at the audio mixer, rather than at the point of writing data to the track.
*
 * <p>When using this mode, the maximum supported playback speed is limited by the size of the
 * audio track's buffer. If the requested speed is not supported, the player's event listener will
 * be notified twice on setting the playback speed: once with the requested speed, then again with
 * the old playback speed, reflecting the fact that the requested speed was not supported.
*
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setEnableAudioTrackPlaybackParams(
boolean enableAudioTrackPlaybackParams) {
this.enableAudioTrackPlaybackParams = enableAudioTrackPlaybackParams;
return this;
}
/**
* Sets the maximum duration for which video renderers can attempt to seamlessly join an ongoing * Sets the maximum duration for which video renderers can attempt to seamlessly join an ongoing
* playback. * playback.
* *
...@@ -290,7 +316,9 @@ public class DefaultRenderersFactory implements RenderersFactory { ...@@ -290,7 +316,9 @@ public class DefaultRenderersFactory implements RenderersFactory {
videoRendererEventListener, videoRendererEventListener,
allowedVideoJoiningTimeMs, allowedVideoJoiningTimeMs,
renderersList); renderersList);
@Nullable AudioSink audioSink = buildAudioSink(context, enableFloatOutput, enableOffload); @Nullable
AudioSink audioSink =
buildAudioSink(context, enableFloatOutput, enableAudioTrackPlaybackParams, enableOffload);
if (audioSink != null) { if (audioSink != null) {
buildAudioRenderers( buildAudioRenderers(
context, context,
...@@ -611,6 +639,8 @@ public class DefaultRenderersFactory implements RenderersFactory { ...@@ -611,6 +639,8 @@ public class DefaultRenderersFactory implements RenderersFactory {
* *
* @param context The {@link Context} associated with the player. * @param context The {@link Context} associated with the player.
* @param enableFloatOutput Whether to enable use of floating point audio output, if available. * @param enableFloatOutput Whether to enable use of floating point audio output, if available.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable use of audio offload for supported formats, if * @param enableOffload Whether to enable use of audio offload for supported formats, if
* available. * available.
* @return The {@link AudioSink} to which the audio renderers will output. May be {@code null} if * @return The {@link AudioSink} to which the audio renderers will output. May be {@code null} if
...@@ -619,11 +649,15 @@ public class DefaultRenderersFactory implements RenderersFactory { ...@@ -619,11 +649,15 @@ public class DefaultRenderersFactory implements RenderersFactory {
*/ */
@Nullable @Nullable
protected AudioSink buildAudioSink( protected AudioSink buildAudioSink(
Context context, boolean enableFloatOutput, boolean enableOffload) { Context context,
boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) {
return new DefaultAudioSink( return new DefaultAudioSink(
AudioCapabilities.getCapabilities(context), AudioCapabilities.getCapabilities(context),
new DefaultAudioProcessorChain(), new DefaultAudioProcessorChain(),
enableFloatOutput, enableFloatOutput,
enableAudioTrackPlaybackParams,
enableOffload); enableOffload);
} }
} }
...@@ -625,7 +625,7 @@ public interface ExoPlayer extends Player { ...@@ -625,7 +625,7 @@ public interface ExoPlayer extends Player {
* <li>audio offload rendering is enabled in {@link * <li>audio offload rendering is enabled in {@link
* DefaultRenderersFactory#setEnableAudioOffload} or the equivalent option passed to {@link * DefaultRenderersFactory#setEnableAudioOffload} or the equivalent option passed to {@link
* com.google.android.exoplayer2.audio.DefaultAudioSink#DefaultAudioSink(AudioCapabilities, * com.google.android.exoplayer2.audio.DefaultAudioSink#DefaultAudioSink(AudioCapabilities,
* DefaultAudioSink.AudioProcessorChain, boolean, boolean)}. * DefaultAudioSink.AudioProcessorChain, boolean, boolean, boolean)}.
* <li>an audio track is playing in a format which the device supports offloading (for example * <li>an audio track is playing in a format which the device supports offloading (for example
* MP3 or AAC). * MP3 or AAC).
* <li>The {@link com.google.android.exoplayer2.audio.AudioSink} is playing with an offload * <li>The {@link com.google.android.exoplayer2.audio.AudioSink} is playing with an offload
......
...@@ -144,6 +144,7 @@ import java.lang.reflect.Method; ...@@ -144,6 +144,7 @@ import java.lang.reflect.Method;
private int outputSampleRate; private int outputSampleRate;
private boolean needsPassthroughWorkarounds; private boolean needsPassthroughWorkarounds;
private long bufferSizeUs; private long bufferSizeUs;
private float audioTrackPlaybackSpeed;
private long smoothedPlayheadOffsetUs; private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs; private long lastPlayheadSampleTimeUs;
...@@ -223,6 +224,16 @@ import java.lang.reflect.Method; ...@@ -223,6 +224,16 @@ import java.lang.reflect.Method;
forceResetWorkaroundTimeMs = C.TIME_UNSET; forceResetWorkaroundTimeMs = C.TIME_UNSET;
lastLatencySampleTimeUs = 0; lastLatencySampleTimeUs = 0;
latencyUs = 0; latencyUs = 0;
audioTrackPlaybackSpeed = 1f;
}
public void setAudioTrackPlaybackSpeed(float audioTrackPlaybackSpeed) {
this.audioTrackPlaybackSpeed = audioTrackPlaybackSpeed;
// Extrapolation from the last audio timestamp relies on the audio rate being constant, so we
// reset audio timestamp tracking and wait for a new timestamp.
if (audioTimestampPoller != null) {
audioTimestampPoller.reset();
}
} }
public long getCurrentPositionUs(boolean sourceEnded) { public long getCurrentPositionUs(boolean sourceEnded) {
...@@ -241,6 +252,8 @@ import java.lang.reflect.Method; ...@@ -241,6 +252,8 @@ import java.lang.reflect.Method;
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames(); long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames); long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs(); long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
elapsedSinceTimestampUs =
Util.getMediaDurationForPlayoutDuration(elapsedSinceTimestampUs, audioTrackPlaybackSpeed);
positionUs = timestampPositionUs + elapsedSinceTimestampUs; positionUs = timestampPositionUs + elapsedSinceTimestampUs;
} else { } else {
if (playheadOffsetCount == 0) { if (playheadOffsetCount == 0) {
......
...@@ -22,6 +22,7 @@ import android.annotation.SuppressLint; ...@@ -22,6 +22,7 @@ import android.annotation.SuppressLint;
import android.media.AudioFormat; import android.media.AudioFormat;
import android.media.AudioManager; import android.media.AudioManager;
import android.media.AudioTrack; import android.media.AudioTrack;
import android.media.PlaybackParams;
import android.os.ConditionVariable; import android.os.ConditionVariable;
import android.os.Handler; import android.os.Handler;
import android.os.SystemClock; import android.os.SystemClock;
...@@ -273,6 +274,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -273,6 +274,7 @@ public final class DefaultAudioSink implements AudioSink {
private final ConditionVariable releasingConditionVariable; private final ConditionVariable releasingConditionVariable;
private final AudioTrackPositionTracker audioTrackPositionTracker; private final AudioTrackPositionTracker audioTrackPositionTracker;
private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints; private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
private final boolean enableAudioTrackPlaybackParams;
private final boolean enableOffload; private final boolean enableOffload;
@MonotonicNonNull private StreamEventCallbackV29 offloadStreamEventCallbackV29; @MonotonicNonNull private StreamEventCallbackV29 offloadStreamEventCallbackV29;
...@@ -287,6 +289,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -287,6 +289,7 @@ public final class DefaultAudioSink implements AudioSink {
private AudioAttributes audioAttributes; private AudioAttributes audioAttributes;
@Nullable private MediaPositionParameters afterDrainParameters; @Nullable private MediaPositionParameters afterDrainParameters;
private MediaPositionParameters mediaPositionParameters; private MediaPositionParameters mediaPositionParameters;
private float audioTrackPlaybackSpeed;
@Nullable private ByteBuffer avSyncHeader; @Nullable private ByteBuffer avSyncHeader;
private int bytesUntilNextAvSync; private int bytesUntilNextAvSync;
...@@ -359,6 +362,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -359,6 +362,7 @@ public final class DefaultAudioSink implements AudioSink {
audioCapabilities, audioCapabilities,
new DefaultAudioProcessorChain(audioProcessors), new DefaultAudioProcessorChain(audioProcessors),
enableFloatOutput, enableFloatOutput,
/* enableAudioTrackPlaybackParams= */ false,
/* enableOffload= */ false); /* enableOffload= */ false);
} }
...@@ -375,6 +379,8 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -375,6 +379,8 @@ public final class DefaultAudioSink implements AudioSink {
* (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio * (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio
* processing (for example, speed adjustment) will not be available when float output is in * processing (for example, speed adjustment) will not be available when float output is in
* use. * use.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable audio offload. If an audio format can be both played * @param enableOffload Whether to enable audio offload. If an audio format can be both played
* with offload and encoded audio passthrough, it will be played in offload. Audio offload is * with offload and encoded audio passthrough, it will be played in offload. Audio offload is
* supported from API level 29. Most Android devices can only support one offload {@link * supported from API level 29. Most Android devices can only support one offload {@link
...@@ -386,10 +392,12 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -386,10 +392,12 @@ public final class DefaultAudioSink implements AudioSink {
@Nullable AudioCapabilities audioCapabilities, @Nullable AudioCapabilities audioCapabilities,
AudioProcessorChain audioProcessorChain, AudioProcessorChain audioProcessorChain,
boolean enableFloatOutput, boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) { boolean enableOffload) {
this.audioCapabilities = audioCapabilities; this.audioCapabilities = audioCapabilities;
this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain); this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
this.enableFloatOutput = Util.SDK_INT >= 21 && enableFloatOutput; this.enableFloatOutput = Util.SDK_INT >= 21 && enableFloatOutput;
this.enableAudioTrackPlaybackParams = Util.SDK_INT >= 23 && enableAudioTrackPlaybackParams;
this.enableOffload = Util.SDK_INT >= 29 && enableOffload; this.enableOffload = Util.SDK_INT >= 29 && enableOffload;
releasingConditionVariable = new ConditionVariable(true); releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener()); audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
...@@ -414,6 +422,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -414,6 +422,7 @@ public final class DefaultAudioSink implements AudioSink {
DEFAULT_SKIP_SILENCE, DEFAULT_SKIP_SILENCE,
/* mediaTimeUs= */ 0, /* mediaTimeUs= */ 0,
/* audioTrackPositionUs= */ 0); /* audioTrackPositionUs= */ 0);
audioTrackPlaybackSpeed = 1f;
drainingAudioProcessorIndex = C.INDEX_UNSET; drainingAudioProcessorIndex = C.INDEX_UNSET;
activeAudioProcessors = new AudioProcessor[0]; activeAudioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0]; outputBuffers = new ByteBuffer[0];
...@@ -641,7 +650,10 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -641,7 +650,10 @@ public final class DefaultAudioSink implements AudioSink {
startMediaTimeUs = max(0, presentationTimeUs); startMediaTimeUs = max(0, presentationTimeUs);
startMediaTimeUsNeedsSync = false; startMediaTimeUsNeedsSync = false;
applyPlaybackSpeedAndSkipSilence(presentationTimeUs); if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
setAudioTrackPlaybackSpeedV23(audioTrackPlaybackSpeed);
}
applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
audioTrackPositionTracker.setAudioTrack( audioTrackPositionTracker.setAudioTrack(
audioTrack, audioTrack,
...@@ -701,7 +713,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -701,7 +713,7 @@ public final class DefaultAudioSink implements AudioSink {
} }
} }
// Re-apply playback parameters. // Re-apply playback parameters.
applyPlaybackSpeedAndSkipSilence(presentationTimeUs); applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
} }
if (!isInitialized()) { if (!isInitialized()) {
...@@ -740,7 +752,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -740,7 +752,7 @@ public final class DefaultAudioSink implements AudioSink {
// Don't process any more input until draining completes. // Don't process any more input until draining completes.
return false; return false;
} }
applyPlaybackSpeedAndSkipSilence(presentationTimeUs); applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
afterDrainParameters = null; afterDrainParameters = null;
} }
...@@ -771,7 +783,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -771,7 +783,7 @@ public final class DefaultAudioSink implements AudioSink {
startMediaTimeUs += adjustmentUs; startMediaTimeUs += adjustmentUs;
startMediaTimeUsNeedsSync = false; startMediaTimeUsNeedsSync = false;
// Re-apply playback parameters because the startMediaTimeUs changed. // Re-apply playback parameters because the startMediaTimeUs changed.
applyPlaybackSpeedAndSkipSilence(presentationTimeUs); applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
if (listener != null && adjustmentUs != 0) { if (listener != null && adjustmentUs != 0) {
listener.onPositionDiscontinuity(); listener.onPositionDiscontinuity();
} }
...@@ -985,17 +997,24 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -985,17 +997,24 @@ public final class DefaultAudioSink implements AudioSink {
@Override @Override
public void setPlaybackSpeed(float playbackSpeed) { public void setPlaybackSpeed(float playbackSpeed) {
setPlaybackSpeedAndSkipSilence(playbackSpeed, getSkipSilenceEnabled()); if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
setAudioTrackPlaybackSpeedV23(playbackSpeed);
} else {
setAudioProcessorPlaybackSpeedAndSkipSilence(playbackSpeed, getSkipSilenceEnabled());
}
} }
@Override @Override
public float getPlaybackSpeed() { public float getPlaybackSpeed() {
return getMediaPositionParameters().playbackSpeed; // We use either audio processor speed adjustment or AudioTrack playback parameters, so one of
// the operands is always 1f.
return getAudioProcessorPlaybackSpeed() * audioTrackPlaybackSpeed;
} }
@Override @Override
public void setSkipSilenceEnabled(boolean skipSilenceEnabled) { public void setSkipSilenceEnabled(boolean skipSilenceEnabled) {
setPlaybackSpeedAndSkipSilence(getPlaybackSpeed(), skipSilenceEnabled); setAudioProcessorPlaybackSpeedAndSkipSilence(
getAudioProcessorPlaybackSpeed(), skipSilenceEnabled);
} }
@Override @Override
...@@ -1147,7 +1166,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1147,7 +1166,7 @@ public final class DefaultAudioSink implements AudioSink {
framesPerEncodedSample = 0; framesPerEncodedSample = 0;
mediaPositionParameters = mediaPositionParameters =
new MediaPositionParameters( new MediaPositionParameters(
getPlaybackSpeed(), getAudioProcessorPlaybackSpeed(),
getSkipSilenceEnabled(), getSkipSilenceEnabled(),
/* mediaTimeUs= */ 0, /* mediaTimeUs= */ 0,
/* audioTrackPositionUs= */ 0); /* audioTrackPositionUs= */ 0);
...@@ -1183,7 +1202,28 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1183,7 +1202,28 @@ public final class DefaultAudioSink implements AudioSink {
}.start(); }.start();
} }
private void setPlaybackSpeedAndSkipSilence(float playbackSpeed, boolean skipSilence) { @RequiresApi(23)
private void setAudioTrackPlaybackSpeedV23(float audioTrackPlaybackSpeed) {
if (isInitialized()) {
PlaybackParams playbackParams =
new PlaybackParams()
.allowDefaults()
.setSpeed(audioTrackPlaybackSpeed)
.setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_FAIL);
try {
audioTrack.setPlaybackParams(playbackParams);
} catch (IllegalArgumentException e) {
Log.w(TAG, "Failed to set playback params", e);
}
// Update the speed using the actual effective speed from the audio track.
audioTrackPlaybackSpeed = audioTrack.getPlaybackParams().getSpeed();
audioTrackPositionTracker.setAudioTrackPlaybackSpeed(audioTrackPlaybackSpeed);
}
this.audioTrackPlaybackSpeed = audioTrackPlaybackSpeed;
}
private void setAudioProcessorPlaybackSpeedAndSkipSilence(
float playbackSpeed, boolean skipSilence) {
MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters(); MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters();
if (playbackSpeed != currentMediaPositionParameters.playbackSpeed if (playbackSpeed != currentMediaPositionParameters.playbackSpeed
|| skipSilence != currentMediaPositionParameters.skipSilence) { || skipSilence != currentMediaPositionParameters.skipSilence) {
...@@ -1205,6 +1245,10 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1205,6 +1245,10 @@ public final class DefaultAudioSink implements AudioSink {
} }
} }
private float getAudioProcessorPlaybackSpeed() {
return getMediaPositionParameters().playbackSpeed;
}
private MediaPositionParameters getMediaPositionParameters() { private MediaPositionParameters getMediaPositionParameters() {
// Mask the already set parameters. // Mask the already set parameters.
return afterDrainParameters != null return afterDrainParameters != null
...@@ -1214,10 +1258,10 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1214,10 +1258,10 @@ public final class DefaultAudioSink implements AudioSink {
: mediaPositionParameters; : mediaPositionParameters;
} }
private void applyPlaybackSpeedAndSkipSilence(long presentationTimeUs) { private void applyAudioProcessorPlaybackSpeedAndSkipSilence(long presentationTimeUs) {
float playbackSpeed = float playbackSpeed =
configuration.canApplyPlaybackParameters configuration.canApplyPlaybackParameters
? audioProcessorChain.applyPlaybackSpeed(getPlaybackSpeed()) ? audioProcessorChain.applyPlaybackSpeed(getAudioProcessorPlaybackSpeed())
: DEFAULT_PLAYBACK_SPEED; : DEFAULT_PLAYBACK_SPEED;
boolean skipSilenceEnabled = boolean skipSilenceEnabled =
configuration.canApplyPlaybackParameters configuration.canApplyPlaybackParameters
......
...@@ -62,6 +62,7 @@ public final class DefaultAudioSinkTest { ...@@ -62,6 +62,7 @@ public final class DefaultAudioSinkTest {
AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES, AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES,
new DefaultAudioSink.DefaultAudioProcessorChain(teeAudioProcessor), new DefaultAudioSink.DefaultAudioProcessorChain(teeAudioProcessor),
/* enableFloatOutput= */ false, /* enableFloatOutput= */ false,
/* enableAudioTrackPlaybackParams= */ false,
/* enableOffload= */ false); /* enableOffload= */ false);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment