Commit 10f142b3 by andrewlewis Committed by Oliver Woodman

Add AudioProcessor.AudioFormat

Issue: #6601
PiperOrigin-RevId: 282515179
parent b7000e64
......@@ -18,7 +18,6 @@ package com.google.android.exoplayer2.ext.gvr;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.util.Assertions;
import com.google.vr.sdk.audio.GvrAudioSurround;
......@@ -44,8 +43,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
private static final int OUTPUT_FRAME_SIZE = OUTPUT_CHANNEL_COUNT * 2; // 16-bit stereo output.
private static final int NO_SURROUND_FORMAT = GvrAudioSurround.SurroundFormat.INVALID;
private int sampleRateHz;
private int channelCount;
private AudioFormat inputAudioFormat;
private int pendingGvrAudioSurroundFormat;
@Nullable private GvrAudioSurround gvrAudioSurround;
private ByteBuffer buffer;
......@@ -60,8 +58,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
public GvrAudioProcessor() {
// Use the identity for the initial orientation.
w = 1f;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT;
}
......@@ -87,15 +84,13 @@ public final class GvrAudioProcessor implements AudioProcessor {
@SuppressWarnings("ReferenceEquality")
@Override
public synchronized void configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_16BIT) {
public synchronized AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
maybeReleaseGvrAudioSurround();
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
throw new UnhandledAudioFormatException(inputAudioFormat);
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
switch (channelCount) {
switch (inputAudioFormat.channelCount) {
case 1:
pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_MONO;
break;
......@@ -115,12 +110,14 @@ public final class GvrAudioProcessor implements AudioProcessor {
pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.THIRD_ORDER_AMBISONICS;
break;
default:
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
throw new UnhandledAudioFormatException(inputAudioFormat);
}
if (buffer == EMPTY_BUFFER) {
buffer = ByteBuffer.allocateDirect(FRAMES_PER_OUTPUT_BUFFER * OUTPUT_FRAME_SIZE)
.order(ByteOrder.nativeOrder());
}
this.inputAudioFormat = inputAudioFormat;
return new AudioFormat(inputAudioFormat.sampleRate, OUTPUT_CHANNEL_COUNT, C.ENCODING_PCM_16BIT);
}
@Override
......@@ -129,21 +126,6 @@ public final class GvrAudioProcessor implements AudioProcessor {
}
@Override
public int getOutputChannelCount() {
return OUTPUT_CHANNEL_COUNT;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer input) {
int position = input.position();
Assertions.checkNotNull(gvrAudioSurround);
......@@ -181,7 +163,10 @@ public final class GvrAudioProcessor implements AudioProcessor {
maybeReleaseGvrAudioSurround();
gvrAudioSurround =
new GvrAudioSurround(
pendingGvrAudioSurroundFormat, sampleRateHz, channelCount, FRAMES_PER_OUTPUT_BUFFER);
pendingGvrAudioSurroundFormat,
inputAudioFormat.sampleRate,
inputAudioFormat.channelCount,
FRAMES_PER_OUTPUT_BUFFER);
gvrAudioSurround.updateNativeOrientation(w, x, y, z);
pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT;
} else if (gvrAudioSurround != null) {
......@@ -195,8 +180,7 @@ public final class GvrAudioProcessor implements AudioProcessor {
maybeReleaseGvrAudioSurround();
updateOrientation(/* w= */ 1f, /* x= */ 0f, /* y= */ 0f, /* z= */ 0f);
inputEnded = false;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT;
}
......
......@@ -16,6 +16,8 @@
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
......@@ -23,24 +25,56 @@ import java.nio.ByteOrder;
* Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate.
*
* <p>Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active in the new configuration.
* {@link #queueInput(ByteBuffer)}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()}
* and {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link
* #reset()} to reset the processor to its unconfigured state and release any resources.
*
* <p>In addition to being able to modify the format of audio, implementations may allow parameters
* to be set that affect the output audio and whether the processor is active/inactive.
*/
public interface AudioProcessor {
/** PCM audio format that may be handled by an audio processor. */
final class AudioFormat {

  /**
   * Sentinel representing an unset/unconfigured format. All fields are {@link Format#NO_VALUE}.
   * Callers in this change compare against it by reference (see e.g. {@code isActive()} in
   * {@code BaseAudioProcessor}), so processors must return this exact instance, not a copy.
   */
  public static final AudioFormat NOT_SET =
      new AudioFormat(
          /* sampleRate= */ Format.NO_VALUE,
          /* channelCount= */ Format.NO_VALUE,
          /* encoding= */ Format.NO_VALUE);

  /** The sample rate in Hertz. */
  public final int sampleRate;

  /** The number of interleaved channels. */
  public final int channelCount;

  /** The type of linear PCM encoding. */
  @C.PcmEncoding public final int encoding;

  /**
   * The number of bytes used to represent one audio frame, or {@link Format#NO_VALUE} if
   * {@link #encoding} is not a linear PCM encoding.
   */
  public final int bytesPerFrame;

  /**
   * Creates an audio format.
   *
   * @param sampleRate The sample rate in Hertz.
   * @param channelCount The number of interleaved channels.
   * @param encoding The type of linear PCM encoding.
   */
  public AudioFormat(int sampleRate, int channelCount, @C.PcmEncoding int encoding) {
    this.sampleRate = sampleRate;
    this.channelCount = channelCount;
    this.encoding = encoding;
    // A frame size is only well-defined for linear PCM encodings; otherwise it is marked unset.
    bytesPerFrame =
        Util.isEncodingLinearPcm(encoding)
            ? Util.getPcmFrameSize(encoding, channelCount)
            : Format.NO_VALUE;
  }

  @Override
  public String toString() {
    // bytesPerFrame is derived from the other fields, so it is omitted from the representation.
    return "AudioFormat["
        + "sampleRate="
        + sampleRate
        + ", channelCount="
        + channelCount
        + ", encoding="
        + encoding
        + ']';
  }
}
/** Exception thrown when a processor can't be configured for a given input audio format. */
final class UnhandledFormatException extends Exception {
final class UnhandledAudioFormatException extends Exception {
public UnhandledFormatException(
int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) {
super("Unhandled format: " + sampleRateHz + " Hz, " + channelCount + " channels in encoding "
+ encoding);
public UnhandledAudioFormatException(AudioFormat inputAudioFormat) {
super("Unhandled format: " + inputAudioFormat);
}
}
......@@ -50,46 +84,24 @@ public interface AudioProcessor {
/**
* Configures the processor to process input audio with the specified format. After calling this
* method, call {@link #isActive()} to determine whether the audio processor is active.
*
* <p>If the audio processor is active after configuration, call {@link #getOutputSampleRateHz()},
* {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} to get its new output format.
* method, call {@link #isActive()} to determine whether the audio processor is active. Returns
* the configured output audio format if this instance is active.
*
* <p>After calling this method, it is necessary to {@link #flush()} the processor to apply the
* new configuration before queueing more data. You can (optionally) first drain output in the
* previous configuration by calling {@link #queueEndOfStream()} and {@link #getOutput()}.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
* @param inputAudioFormat The format of audio that will be queued after the next call to {@link
* #flush()}.
* @return The configured output audio format if this instance is {@link #isActive() active}.
* @throws UnhandledAudioFormatException Thrown if the specified format can't be handled as input.
*/
void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException;
AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException;
/** Returns whether the processor is configured and will process input buffers. */
boolean isActive();
/**
* Returns the number of audio channels in the data output by the processor. The value may change
* as a result of calling {@link #configure(int, int, int)}.
*/
int getOutputChannelCount();
/**
* Returns the audio encoding used in the data output by the processor. The value may change as a
* result of calling {@link #configure(int, int, int)}.
*/
@C.PcmEncoding
int getOutputEncoding();
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)}.
*/
int getOutputSampleRateHz();
/**
* Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
* read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
......@@ -130,6 +142,6 @@ public interface AudioProcessor {
*/
void flush();
/** Resets the processor to its unconfigured state. */
/** Resets the processor to its unconfigured state, releasing any resources. */
void reset();
}
......@@ -16,24 +16,20 @@
package com.google.android.exoplayer2.audio;
import androidx.annotation.CallSuper;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Base class for audio processors that keep an output buffer and an internal buffer that is reused
* whenever input is queued.
* whenever input is queued. Subclasses should override {@link #onConfigure(AudioFormat)} to return
* the output audio format for the processor if it's active.
*/
public abstract class BaseAudioProcessor implements AudioProcessor {
/** The configured input sample rate, in Hertz, or {@link Format#NO_VALUE} if not configured. */
protected int sampleRateHz;
/** The configured input channel count, or {@link Format#NO_VALUE} if not configured. */
protected int channelCount;
/** The configured input encoding, or {@link Format#NO_VALUE} if not configured. */
@C.PcmEncoding protected int encoding;
/** The configured input audio format. */
protected AudioFormat inputAudioFormat;
private AudioFormat outputAudioFormat;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
......@@ -41,29 +37,21 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
public BaseAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
encoding = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
}
@Override
public boolean isActive() {
return sampleRateHz != Format.NO_VALUE;
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return encoding;
public final AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
this.inputAudioFormat = inputAudioFormat;
outputAudioFormat = onConfigure(inputAudioFormat);
return isActive() ? outputAudioFormat : AudioFormat.NOT_SET;
}
@Override
public int getOutputSampleRateHz() {
return sampleRateHz;
public boolean isActive() {
return outputAudioFormat != AudioFormat.NOT_SET;
}
@Override
......@@ -98,20 +86,11 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
public final void reset() {
flush();
buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
onReset();
}
/** Sets the input format of this processor. */
protected final void setInputFormat(
int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) {
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.encoding = encoding;
}
/**
* Replaces the current output buffer with a buffer of at least {@code count} bytes and returns
* it. Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be
......@@ -132,6 +111,12 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
return outputBuffer.hasRemaining();
}
/** Called when the processor is configured for a new input format. */
protected AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
return AudioFormat.NOT_SET;
}
/** Called when the end-of-stream is queued to the processor. */
protected void onQueueEndOfStream() {
// Do nothing.
......
......@@ -24,19 +24,17 @@ import java.nio.ByteBuffer;
* An {@link AudioProcessor} that applies a mapping from input channels onto specified output
* channels. This can be used to reorder, duplicate or discard channels.
*/
/* package */
// the constructor does not initialize fields: pendingOutputChannels, outputChannels
@SuppressWarnings("nullness:initialization.fields.uninitialized")
final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
/* package */ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
@Nullable private int[] pendingOutputChannels;
private boolean active;
@Nullable private int[] outputChannels;
/**
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
* to start using the new channel map.
* Resets the channel mapping. After calling this method, call {@link #configure(AudioFormat)} to
* start using the new channel map.
*
* @param outputChannels The mapping from input to output channel indices, or {@code null} to
* leave the input unchanged.
......@@ -47,38 +45,30 @@ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
}
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException {
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
outputChannels = pendingOutputChannels;
int[] outputChannels = this.outputChannels;
if (outputChannels == null) {
active = false;
return;
return AudioFormat.NOT_SET;
}
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
setInputFormat(sampleRateHz, channelCount, encoding);
active = channelCount != outputChannels.length;
boolean active = inputAudioFormat.channelCount != outputChannels.length;
for (int i = 0; i < outputChannels.length; i++) {
int channelIndex = outputChannels[i];
if (channelIndex >= channelCount) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
if (channelIndex >= inputAudioFormat.channelCount) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
active |= (channelIndex != i);
}
}
@Override
public boolean isActive() {
return active;
}
@Override
public int getOutputChannelCount() {
return outputChannels == null ? channelCount : outputChannels.length;
return active
? new AudioFormat(inputAudioFormat.sampleRate, outputChannels.length, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
}
@Override
......@@ -86,14 +76,14 @@ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
int[] outputChannels = Assertions.checkNotNull(this.outputChannels);
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int frameCount = (limit - position) / (2 * channelCount);
int frameCount = (limit - position) / (2 * inputAudioFormat.channelCount);
int outputSize = frameCount * outputChannels.length * 2;
ByteBuffer buffer = replaceOutputBuffer(outputSize);
while (position < limit) {
for (int channelIndex : outputChannels) {
buffer.putShort(inputBuffer.getShort(position + 2 * channelIndex));
}
position += channelCount * 2;
position += inputAudioFormat.channelCount * 2;
}
inputBuffer.position(limit);
buffer.flip();
......@@ -103,7 +93,6 @@ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
protected void onReset() {
outputChannels = null;
pendingOutputChannels = null;
active = false;
}
}
......@@ -27,6 +27,7 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util;
......@@ -435,18 +436,22 @@ public final class DefaultAudioSink implements AudioSink {
if (processingEnabled) {
trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames);
channelMappingAudioProcessor.setChannelMap(outputChannels);
AudioProcessor.AudioFormat inputAudioFormat =
new AudioProcessor.AudioFormat(sampleRate, channelCount, encoding);
AudioProcessor.AudioFormat outputAudioFormat = inputAudioFormat;
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
audioProcessor.configure(sampleRate, channelCount, encoding);
} catch (AudioProcessor.UnhandledFormatException e) {
outputAudioFormat = audioProcessor.configure(inputAudioFormat);
} catch (UnhandledAudioFormatException e) {
throw new ConfigurationException(e);
}
if (audioProcessor.isActive()) {
channelCount = audioProcessor.getOutputChannelCount();
sampleRate = audioProcessor.getOutputSampleRateHz();
encoding = audioProcessor.getOutputEncoding();
inputAudioFormat = outputAudioFormat;
}
}
sampleRate = outputAudioFormat.sampleRate;
channelCount = outputAudioFormat.channelCount;
encoding = outputAudioFormat.encoding;
}
int outputChannelConfig = getChannelConfig(channelCount, isInputPcm);
......
......@@ -16,6 +16,7 @@
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
......@@ -29,27 +30,21 @@ import java.nio.ByteBuffer;
private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF;
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException {
if (!Util.isEncodingHighResolutionIntegerPcm(encoding)) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (!Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding)) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
setInputFormat(sampleRateHz, channelCount, encoding);
}
@Override
public boolean isActive() {
return Util.isEncodingHighResolutionIntegerPcm(encoding);
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_FLOAT;
return Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding)
? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_FLOAT)
: AudioFormat.NOT_SET;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
boolean isInput32Bit = encoding == C.ENCODING_PCM_32BIT;
Assertions.checkState(Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding));
boolean isInput32Bit = inputAudioFormat.encoding == C.ENCODING_PCM_32BIT;
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position;
......@@ -65,7 +60,7 @@ import java.nio.ByteBuffer;
| ((inputBuffer.get(i + 3) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer);
}
} else {
} else { // Input is 24-bit PCM.
for (int i = position; i < limit; i += 3) {
int pcm32BitInteger =
((inputBuffer.get(i) & 0xFF) << 8)
......
......@@ -26,23 +26,17 @@ import java.nio.ByteBuffer;
/* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor {
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException {
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
@C.PcmEncoding int encoding = inputAudioFormat.encoding;
if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
throw new UnhandledAudioFormatException(inputAudioFormat);
}
setInputFormat(sampleRateHz, channelCount, encoding);
}
@Override
public boolean isActive() {
return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
return encoding != C.ENCODING_PCM_16BIT
? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
}
@Override
......@@ -52,7 +46,7 @@ import java.nio.ByteBuffer;
int limit = inputBuffer.limit();
int size = limit - position;
int resampledSize;
switch (encoding) {
switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT:
resampledSize = size * 2;
break;
......@@ -74,7 +68,7 @@ import java.nio.ByteBuffer;
// Resample the little endian input and update the input/output buffers.
ByteBuffer buffer = replaceOutputBuffer(resampledSize);
switch (encoding) {
switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT:
// 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
for (int i = position; i < limit; i++) {
......
......@@ -119,18 +119,17 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
// AudioProcessor implementation.
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
bytesPerFrame = channelCount * 2;
setInputFormat(sampleRateHz, channelCount, encoding);
return enabled ? inputAudioFormat : AudioFormat.NOT_SET;
}
@Override
public boolean isActive() {
return super.isActive() && enabled;
return enabled;
}
@Override
......@@ -165,7 +164,8 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
@Override
protected void onFlush() {
if (isActive()) {
if (enabled) {
bytesPerFrame = inputAudioFormat.bytesPerFrame;
int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
......@@ -317,7 +317,7 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
* Returns the number of input frames corresponding to {@code durationUs} microseconds of audio.
*/
private int durationUsToFrames(long durationUs) {
return (int) ((durationUs * sampleRateHz) / C.MICROS_PER_SECOND);
return (int) ((durationUs * inputAudioFormat.sampleRate) / C.MICROS_PER_SECOND);
}
/**
......
......@@ -17,7 +17,6 @@ package com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
......@@ -62,12 +61,12 @@ public final class SonicAudioProcessor implements AudioProcessor {
*/
private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
private int channelCount;
private int sampleRateHz;
private int pendingOutputSampleRate;
private float speed;
private float pitch;
private int outputSampleRateHz;
private int pendingOutputSampleRateHz;
private AudioFormat inputAudioFormat;
private AudioFormat outputAudioFormat;
private boolean pendingSonicRecreation;
@Nullable private Sonic sonic;
......@@ -84,13 +83,12 @@ public final class SonicAudioProcessor implements AudioProcessor {
public SonicAudioProcessor() {
speed = 1f;
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
}
/**
......@@ -129,14 +127,14 @@ public final class SonicAudioProcessor implements AudioProcessor {
/**
* Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
* audio at the same sample rate as the input. After calling this method, call
* {@link #configure(int, int, int)} to start using the new sample rate.
* audio at the same sample rate as the input. After calling this method, call {@link
* #configure(AudioFormat)} to start using the new sample rate.
*
* @param sampleRateHz The sample rate for output audio, in hertz.
* @see #configure(int, int, int)
* @see #configure(AudioFormat)
*/
public void setOutputSampleRateHz(int sampleRateHz) {
pendingOutputSampleRateHz = sampleRateHz;
pendingOutputSampleRate = sampleRateHz;
}
/**
......@@ -149,50 +147,39 @@ public final class SonicAudioProcessor implements AudioProcessor {
*/
public long scaleDurationForSpeedup(long duration) {
if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
return outputSampleRateHz == sampleRateHz
return outputAudioFormat.sampleRate == inputAudioFormat.sampleRate
? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
: Util.scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz,
outputBytes * sampleRateHz);
: Util.scaleLargeTimestamp(
duration,
inputBytes * outputAudioFormat.sampleRate,
outputBytes * inputAudioFormat.sampleRate);
} else {
return (long) ((double) speed * duration);
}
}
@Override
public void configure(int sampleRateHz, int channelCount, @Encoding int encoding)
throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE
? sampleRateHz : pendingOutputSampleRateHz;
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.outputSampleRateHz = outputSampleRateHz;
int outputSampleRateHz =
pendingOutputSampleRate == SAMPLE_RATE_NO_CHANGE
? inputAudioFormat.sampleRate
: pendingOutputSampleRate;
this.inputAudioFormat = inputAudioFormat;
this.outputAudioFormat =
new AudioFormat(outputSampleRateHz, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT);
pendingSonicRecreation = true;
return outputAudioFormat;
}
@Override
public boolean isActive() {
return sampleRateHz != Format.NO_VALUE
return outputAudioFormat.sampleRate != Format.NO_VALUE
&& (Math.abs(speed - 1f) >= CLOSE_THRESHOLD
|| Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
|| outputSampleRateHz != sampleRateHz);
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputSampleRateHz() {
return outputSampleRateHz;
|| outputAudioFormat.sampleRate != inputAudioFormat.sampleRate);
}
@Override
......@@ -245,7 +232,13 @@ public final class SonicAudioProcessor implements AudioProcessor {
public void flush() {
if (isActive()) {
if (pendingSonicRecreation) {
sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
sonic =
new Sonic(
inputAudioFormat.sampleRate,
inputAudioFormat.channelCount,
speed,
pitch,
outputAudioFormat.sampleRate);
} else if (sonic != null) {
sonic.flush();
}
......@@ -260,13 +253,12 @@ public final class SonicAudioProcessor implements AudioProcessor {
public void reset() {
speed = 1f;
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
pendingSonicRecreation = false;
sonic = null;
inputBytes = 0;
......
......@@ -64,8 +64,9 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
}
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) {
setInputFormat(sampleRateHz, channelCount, encoding);
public AudioFormat onConfigure(AudioFormat inputAudioFormat) {
// This processor is always active (if passed to the sink) and outputs its input.
return inputAudioFormat;
}
@Override
......@@ -81,7 +82,8 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
@Override
protected void onFlush() {
if (isActive()) {
audioBufferSink.flush(sampleRateHz, channelCount, encoding);
audioBufferSink.flush(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, inputAudioFormat.encoding);
}
}
......
......@@ -24,7 +24,6 @@ import java.nio.ByteBuffer;
@C.PcmEncoding private static final int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT;
private boolean isActive;
private int trimStartFrames;
private int trimEndFrames;
private int bytesPerFrame;
......@@ -42,7 +41,7 @@ import java.nio.ByteBuffer;
/**
* Sets the number of audio frames to trim from the start and end of audio passed to this
* processor. After calling this method, call {@link #configure(int, int, int)} to apply the new
* processor. After calling this method, call {@link #configure(AudioFormat)} to apply the new
* trimming frame counts.
*
* @param trimStartFrames The number of audio frames to trim from the start of audio.
......@@ -68,26 +67,20 @@ import java.nio.ByteBuffer;
}
@Override
public void configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding)
throws UnhandledFormatException {
if (encoding != OUTPUT_ENCODING) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (inputAudioFormat.encoding != OUTPUT_ENCODING) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
if (endBufferSize > 0) {
trimmedFrameCount += endBufferSize / bytesPerFrame;
}
bytesPerFrame = Util.getPcmFrameSize(OUTPUT_ENCODING, channelCount);
bytesPerFrame = inputAudioFormat.bytesPerFrame;
endBuffer = new byte[trimEndFrames * bytesPerFrame];
endBufferSize = 0;
pendingTrimStartBytes = trimStartFrames * bytesPerFrame;
isActive = trimStartFrames != 0 || trimEndFrames != 0;
receivedInputSinceConfigure = false;
setInputFormat(sampleRateHz, channelCount, encoding);
}
@Override
public boolean isActive() {
return isActive;
return trimStartFrames != 0 || trimEndFrames != 0 ? inputAudioFormat : AudioFormat.NOT_SET;
}
@Override
......@@ -140,7 +133,6 @@ import java.nio.ByteBuffer;
buffer.flip();
}
@SuppressWarnings("ReferenceEquality")
@Override
public ByteBuffer getOutput() {
if (super.isEnded() && endBufferSize > 0) {
......@@ -155,7 +147,6 @@ import java.nio.ByteBuffer;
return super.getOutput();
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
return super.isEnded() && endBufferSize == 0;
......
......@@ -20,6 +20,8 @@ import static org.junit.Assert.fail;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
......@@ -28,6 +30,16 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public final class SonicAudioProcessorTest {
private static final AudioFormat AUDIO_FORMAT_22050_HZ =
new AudioFormat(
/* sampleRate= */ 22050, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
private static final AudioFormat AUDIO_FORMAT_44100_HZ =
new AudioFormat(
/* sampleRate= */ 44100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
private static final AudioFormat AUDIO_FORMAT_48000_HZ =
new AudioFormat(
/* sampleRate= */ 48000, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
private SonicAudioProcessor sonicAudioProcessor;
@Before
......@@ -39,59 +51,36 @@ public final class SonicAudioProcessorTest {
public void testReconfigureWithSameSampleRate() throws Exception {
// When configured for resampling from 44.1 kHz to 48 kHz, the output sample rate is correct.
sonicAudioProcessor.setOutputSampleRateHz(48000);
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
AudioFormat outputAudioFormat = sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
assertThat(sonicAudioProcessor.isActive()).isTrue();
assertThat(outputAudioFormat.sampleRate).isEqualTo(48000);
// When reconfigured with 48 kHz input, there is no resampling.
sonicAudioProcessor.configure(48000, 2, C.ENCODING_PCM_16BIT);
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
outputAudioFormat = sonicAudioProcessor.configure(AUDIO_FORMAT_48000_HZ);
assertThat(sonicAudioProcessor.isActive()).isFalse();
assertThat(outputAudioFormat.sampleRate).isEqualTo(48000);
// When reconfigure with 44.1 kHz input, resampling is enabled again.
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
outputAudioFormat = sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
assertThat(sonicAudioProcessor.isActive()).isTrue();
assertThat(outputAudioFormat.sampleRate).isEqualTo(48000);
}
@Test
public void testNoSampleRateChange() throws Exception {
// Configure for resampling 44.1 kHz to 48 kHz.
sonicAudioProcessor.setOutputSampleRateHz(48000);
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
assertThat(sonicAudioProcessor.isActive()).isTrue();
// Reconfigure to not modify the sample rate.
sonicAudioProcessor.setOutputSampleRateHz(SonicAudioProcessor.SAMPLE_RATE_NO_CHANGE);
sonicAudioProcessor.configure(22050, 2, C.ENCODING_PCM_16BIT);
sonicAudioProcessor.configure(AUDIO_FORMAT_22050_HZ);
// The sample rate is unmodified, and the audio processor is not active.
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(22050);
assertThat(sonicAudioProcessor.isActive()).isFalse();
}
@Test
public void testBecomesActiveAfterConfigure() throws Exception {
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
// Set a new sample rate.
sonicAudioProcessor.setOutputSampleRateHz(22050);
// The new sample rate is not active yet.
assertThat(sonicAudioProcessor.isActive()).isFalse();
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(44100);
}
@Test
public void testSampleRateChangeBecomesActiveAfterConfigure() throws Exception {
// Configure for resampling 44.1 kHz to 48 kHz.
sonicAudioProcessor.setOutputSampleRateHz(48000);
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
// Set a new sample rate, which isn't active yet.
sonicAudioProcessor.setOutputSampleRateHz(22050);
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
// The new sample rate takes effect on reconfiguration.
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(22050);
}
@Test
public void testIsActiveWithSpeedChange() throws Exception {
sonicAudioProcessor.setSpeed(1.5f);
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
sonicAudioProcessor.flush();
assertThat(sonicAudioProcessor.isActive()).isTrue();
}
......@@ -99,35 +88,45 @@ public final class SonicAudioProcessorTest {
@Test
public void testIsActiveWithPitchChange() throws Exception {
sonicAudioProcessor.setPitch(1.5f);
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
sonicAudioProcessor.flush();
assertThat(sonicAudioProcessor.isActive()).isTrue();
}
@Test
public void testIsNotActiveWithNoChange() throws Exception {
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
sonicAudioProcessor.configure(AUDIO_FORMAT_44100_HZ);
assertThat(sonicAudioProcessor.isActive()).isFalse();
}
@Test
public void testDoesNotSupportNon16BitInput() throws Exception {
try {
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_8BIT);
sonicAudioProcessor.configure(
new AudioFormat(
/* sampleRate= */ 44100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_8BIT));
fail();
} catch (AudioProcessor.UnhandledFormatException e) {
} catch (UnhandledAudioFormatException e) {
// Expected.
}
try {
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_24BIT);
sonicAudioProcessor.configure(
new AudioFormat(
/* sampleRate= */ 44100,
/* channelCount= */ 2,
/* encoding= */ C.ENCODING_PCM_24BIT));
fail();
} catch (AudioProcessor.UnhandledFormatException e) {
} catch (UnhandledAudioFormatException e) {
// Expected.
}
try {
sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_32BIT);
sonicAudioProcessor.configure(
new AudioFormat(
/* sampleRate= */ 44100,
/* channelCount= */ 2,
/* encoding= */ C.ENCODING_PCM_32BIT));
fail();
} catch (AudioProcessor.UnhandledFormatException e) {
} catch (UnhandledAudioFormatException e) {
// Expected.
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment