Commit ac31dc7c by andrewlewis Committed by Oliver Woodman

Allow setting output sample rate in SonicAudioProcessor

This is not really useful with the DefaultAudioSink, but could be used in a
custom AudioSink when mixing audio from sources that have different sample
rates.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=172434482
parent e46a7600
...@@ -139,6 +139,11 @@ public final class GvrAudioProcessor implements AudioProcessor { ...@@ -139,6 +139,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
} }
@Override @Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer input) { public void queueInput(ByteBuffer input) {
int position = input.position(); int position = input.position();
int readBytes = gvrAudioSurround.addInput(input, position, input.limit() - position); int readBytes = gvrAudioSurround.addInput(input, position, input.limit() - position);
......
...@@ -20,7 +20,15 @@ import java.nio.ByteBuffer; ...@@ -20,7 +20,15 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
/** /**
* Interface for audio processors. * Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate.
* <p>
* Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active.
* {@link #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()},
* {@link #isEnded()}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and
* {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call
* {@link #reset()} to reset the processor to its unconfigured state.
*/ */
public interface AudioProcessor { public interface AudioProcessor {
...@@ -46,8 +54,9 @@ public interface AudioProcessor { ...@@ -46,8 +54,9 @@ public interface AudioProcessor {
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
* result of the call. If it's active, {@link #getOutputChannelCount()} and * result of the call. If it's active, {@link #getOutputSampleRateHz()},
* {@link #getOutputEncoding()} return the processor's output format. * {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return the processor's output
* format.
* *
* @param sampleRateHz The sample rate of input audio in Hz. * @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio. * @param channelCount The number of interleaved channels in input audio.
...@@ -65,17 +74,28 @@ public interface AudioProcessor { ...@@ -65,17 +74,28 @@ public interface AudioProcessor {
boolean isActive(); boolean isActive();
/** /**
* Returns the number of audio channels in the data output by the processor. * Returns the number of audio channels in the data output by the processor. The value may change
* as a result of calling {@link #configure(int, int, int)} and is undefined if the instance is
* not active.
*/ */
int getOutputChannelCount(); int getOutputChannelCount();
/** /**
* Returns the audio encoding used in the data output by the processor. * Returns the audio encoding used in the data output by the processor. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/ */
@C.Encoding @C.Encoding
int getOutputEncoding(); int getOutputEncoding();
/** /**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
int getOutputSampleRateHz();
/**
* Queues audio data between the position and limit of the input {@code buffer} for processing. * Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as * {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
* read-only. Its position will be advanced by the number of bytes consumed (which may be zero). * read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
...@@ -116,7 +136,7 @@ public interface AudioProcessor { ...@@ -116,7 +136,7 @@ public interface AudioProcessor {
void flush(); void flush();
/** /**
* Resets the processor to its initial state. * Resets the processor to its unconfigured state.
*/ */
void reset(); void reset();
......
...@@ -186,12 +186,12 @@ public interface AudioSink { ...@@ -186,12 +186,12 @@ public interface AudioSink {
/** /**
* Configures (or reconfigures) the sink. * Configures (or reconfigures) the sink.
* *
* @param mimeType The MIME type of audio data provided in the input buffers. * @param inputMimeType The MIME type of audio data provided in the input buffers.
* @param channelCount The number of channels. * @param inputChannelCount The number of channels.
* @param sampleRate The sample rate in Hz. * @param inputSampleRate The sample rate in Hz.
* @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT}, * @param inputPcmEncoding For PCM formats, the encoding used. One of
* {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and * {@link C#ENCODING_PCM_8BIT}, {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT}
* {@link C#ENCODING_PCM_32BIT}. * and {@link C#ENCODING_PCM_32BIT}.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size. * suitable buffer size.
* @param outputChannels A mapping from input to output channels that is applied to this sink's * @param outputChannels A mapping from input to output channels that is applied to this sink's
...@@ -206,9 +206,9 @@ public interface AudioSink { ...@@ -206,9 +206,9 @@ public interface AudioSink {
* {@link #configure(String, int, int, int, int, int[], int, int)}. * {@link #configure(String, int, int, int, int, int[], int, int)}.
* @throws ConfigurationException If an error occurs configuring the sink. * @throws ConfigurationException If an error occurs configuring the sink.
*/ */
void configure(String mimeType, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding, void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples, @C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
int trimEndSamples) throws ConfigurationException; int trimStartSamples, int trimEndSamples) throws ConfigurationException;
/** /**
* Starts or resumes consuming audio if initialized. * Starts or resumes consuming audio if initialized.
......
...@@ -104,6 +104,11 @@ import java.util.Arrays; ...@@ -104,6 +104,11 @@ import java.util.Arrays;
} }
@Override @Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) { public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position(); int position = inputBuffer.position();
int limit = inputBuffer.limit(); int limit = inputBuffer.limit();
......
...@@ -149,13 +149,6 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -149,13 +149,6 @@ public final class DefaultAudioSink implements AudioSink {
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000; private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
/** /**
* The minimum number of output bytes from {@link #sonicAudioProcessor} at which the speedup is
* calculated using the input/output byte counts from the processor, rather than using the
* current playback parameters speed.
*/
private static final int SONIC_MIN_BYTES_FOR_SPEEDUP = 1024;
/**
* Whether to enable a workaround for an issue where an audio effect does not keep its session * Whether to enable a workaround for an issue where an audio effect does not keep its session
* active across releasing/initializing a new audio track, on platform builds where * active across releasing/initializing a new audio track, on platform builds where
* {@link Util#SDK_INT} &lt; 21. * {@link Util#SDK_INT} &lt; 21.
...@@ -189,6 +182,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -189,6 +182,7 @@ public final class DefaultAudioSink implements AudioSink {
*/ */
private AudioTrack keepSessionIdAudioTrack; private AudioTrack keepSessionIdAudioTrack;
private AudioTrack audioTrack; private AudioTrack audioTrack;
private int inputSampleRate;
private int sampleRate; private int sampleRate;
private int channelConfig; private int channelConfig;
private @C.Encoding int encoding; private @C.Encoding int encoding;
...@@ -337,14 +331,18 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -337,14 +331,18 @@ public final class DefaultAudioSink implements AudioSink {
} }
@Override @Override
public void configure(String mimeType, int channelCount, int sampleRate, public void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels, @C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
int trimStartSamples, int trimEndSamples) throws ConfigurationException { int trimStartSamples, int trimEndSamples) throws ConfigurationException {
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType); this.inputSampleRate = inputSampleRate;
@C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding; int channelCount = inputChannelCount;
int sampleRate = inputSampleRate;
@C.Encoding int encoding;
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(inputMimeType);
boolean flush = false; boolean flush = false;
if (!passthrough) { if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount); encoding = inputPcmEncoding;
pcmFrameSize = Util.getPcmFrameSize(inputPcmEncoding, channelCount);
trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples); trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples);
channelMappingAudioProcessor.setChannelMap(outputChannels); channelMappingAudioProcessor.setChannelMap(outputChannels);
for (AudioProcessor audioProcessor : availableAudioProcessors) { for (AudioProcessor audioProcessor : availableAudioProcessors) {
...@@ -355,12 +353,15 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -355,12 +353,15 @@ public final class DefaultAudioSink implements AudioSink {
} }
if (audioProcessor.isActive()) { if (audioProcessor.isActive()) {
channelCount = audioProcessor.getOutputChannelCount(); channelCount = audioProcessor.getOutputChannelCount();
sampleRate = audioProcessor.getOutputSampleRateHz();
encoding = audioProcessor.getOutputEncoding(); encoding = audioProcessor.getOutputEncoding();
} }
} }
if (flush) { if (flush) {
resetAudioProcessors(); resetAudioProcessors();
} }
} else {
encoding = getEncodingForMimeType(inputMimeType);
} }
int channelConfig; int channelConfig;
...@@ -598,8 +599,8 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -598,8 +599,8 @@ public final class DefaultAudioSink implements AudioSink {
startMediaTimeState = START_IN_SYNC; startMediaTimeState = START_IN_SYNC;
} else { } else {
// Sanity check that presentationTimeUs is consistent with the expected value. // Sanity check that presentationTimeUs is consistent with the expected value.
long expectedPresentationTimeUs = startMediaTimeUs long expectedPresentationTimeUs =
+ framesToDurationUs(getSubmittedFrames()); startMediaTimeUs + inputFramesToDurationUs(getSubmittedFrames());
if (startMediaTimeState == START_IN_SYNC if (startMediaTimeState == START_IN_SYNC
&& Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) { && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got " Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got "
...@@ -997,15 +998,11 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -997,15 +998,11 @@ public final class DefaultAudioSink implements AudioSink {
return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs; return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
} }
if (playbackParametersCheckpoints.isEmpty() if (playbackParametersCheckpoints.isEmpty()) {
&& sonicAudioProcessor.getOutputByteCount() >= SONIC_MIN_BYTES_FOR_SPEEDUP) {
return playbackParametersOffsetUs return playbackParametersOffsetUs
+ Util.scaleLargeTimestamp(positionUs - playbackParametersPositionUs, + sonicAudioProcessor.scaleDurationForSpeedup(positionUs - playbackParametersPositionUs);
sonicAudioProcessor.getInputByteCount(), sonicAudioProcessor.getOutputByteCount());
} }
// We are playing data at a previous playback speed, so fall back to multiplying by the speed.
// We are playing drained data at a previous playback speed, or don't have enough bytes to
// calculate an accurate speedup, so fall back to multiplying by the speed.
return playbackParametersOffsetUs return playbackParametersOffsetUs
+ (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs)); + (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs));
} }
...@@ -1098,6 +1095,10 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1098,6 +1095,10 @@ public final class DefaultAudioSink implements AudioSink {
return audioTrack != null; return audioTrack != null;
} }
private long inputFramesToDurationUs(long frameCount) {
return (frameCount * C.MICROS_PER_SECOND) / inputSampleRate;
}
private long framesToDurationUs(long frameCount) { private long framesToDurationUs(long frameCount) {
return (frameCount * C.MICROS_PER_SECOND) / sampleRate; return (frameCount * C.MICROS_PER_SECOND) / sampleRate;
} }
......
...@@ -80,6 +80,11 @@ import java.nio.ByteOrder; ...@@ -80,6 +80,11 @@ import java.nio.ByteOrder;
} }
@Override @Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) { public void queueInput(ByteBuffer inputBuffer) {
// Prepare the output buffer. // Prepare the output buffer.
int position = inputBuffer.position(); int position = inputBuffer.position();
......
...@@ -32,8 +32,11 @@ import java.util.Arrays; ...@@ -32,8 +32,11 @@ import java.util.Arrays;
private static final int MAXIMUM_PITCH = 400; private static final int MAXIMUM_PITCH = 400;
private static final int AMDF_FREQUENCY = 4000; private static final int AMDF_FREQUENCY = 4000;
private final int sampleRate; private final int inputSampleRateHz;
private final int numChannels; private final int numChannels;
private final float speed;
private final float pitch;
private final float rate;
private final int minPeriod; private final int minPeriod;
private final int maxPeriod; private final int maxPeriod;
private final int maxRequired; private final int maxRequired;
...@@ -47,8 +50,6 @@ import java.util.Arrays; ...@@ -47,8 +50,6 @@ import java.util.Arrays;
private short[] pitchBuffer; private short[] pitchBuffer;
private int oldRatePosition; private int oldRatePosition;
private int newRatePosition; private int newRatePosition;
private float speed;
private float pitch;
private int numInputSamples; private int numInputSamples;
private int numOutputSamples; private int numOutputSamples;
private int numPitchSamples; private int numPitchSamples;
...@@ -61,14 +62,18 @@ import java.util.Arrays; ...@@ -61,14 +62,18 @@ import java.util.Arrays;
/** /**
* Creates a new Sonic audio stream processor. * Creates a new Sonic audio stream processor.
* *
* @param sampleRate The sample rate of input audio. * @param inputSampleRateHz The sample rate of input audio, in hertz.
* @param numChannels The number of channels in the input audio. * @param numChannels The number of channels in the input audio.
* @param speed The speedup factor for output audio.
* @param pitch The pitch factor for output audio.
* @param outputSampleRateHz The sample rate for output audio, in hertz.
*/ */
public Sonic(int sampleRate, int numChannels) { public Sonic(int inputSampleRateHz, int numChannels, float speed, float pitch,
this.sampleRate = sampleRate; int outputSampleRateHz) {
this.inputSampleRateHz = inputSampleRateHz;
this.numChannels = numChannels; this.numChannels = numChannels;
minPeriod = sampleRate / MAXIMUM_PITCH; minPeriod = inputSampleRateHz / MAXIMUM_PITCH;
maxPeriod = sampleRate / MINIMUM_PITCH; maxPeriod = inputSampleRateHz / MINIMUM_PITCH;
maxRequired = 2 * maxPeriod; maxRequired = 2 * maxPeriod;
downSampleBuffer = new short[maxRequired]; downSampleBuffer = new short[maxRequired];
inputBufferSize = maxRequired; inputBufferSize = maxRequired;
...@@ -80,36 +85,9 @@ import java.util.Arrays; ...@@ -80,36 +85,9 @@ import java.util.Arrays;
oldRatePosition = 0; oldRatePosition = 0;
newRatePosition = 0; newRatePosition = 0;
prevPeriod = 0; prevPeriod = 0;
speed = 1.0f;
pitch = 1.0f;
}
/**
* Sets the output speed.
*/
public void setSpeed(float speed) {
this.speed = speed; this.speed = speed;
}
/**
* Gets the output speed.
*/
public float getSpeed() {
return speed;
}
/**
* Sets the output pitch.
*/
public void setPitch(float pitch) {
this.pitch = pitch; this.pitch = pitch;
} this.rate = (float) inputSampleRateHz / outputSampleRateHz;
/**
* Gets the output pitch.
*/
public float getPitch() {
return pitch;
} }
/** /**
...@@ -148,8 +126,9 @@ import java.util.Arrays; ...@@ -148,8 +126,9 @@ import java.util.Arrays;
public void queueEndOfStream() { public void queueEndOfStream() {
int remainingSamples = numInputSamples; int remainingSamples = numInputSamples;
float s = speed / pitch; float s = speed / pitch;
float r = rate * pitch;
int expectedOutputSamples = int expectedOutputSamples =
numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / pitch + 0.5f); numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / r + 0.5f);
// Add enough silence to flush both input and pitch buffers. // Add enough silence to flush both input and pitch buffers.
enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired); enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
...@@ -292,7 +271,7 @@ import java.util.Arrays; ...@@ -292,7 +271,7 @@ import java.util.Arrays;
// sampling. // sampling.
int period; int period;
int retPeriod; int retPeriod;
int skip = sampleRate > AMDF_FREQUENCY ? sampleRate / AMDF_FREQUENCY : 1; int skip = inputSampleRateHz > AMDF_FREQUENCY ? inputSampleRateHz / AMDF_FREQUENCY : 1;
if (numChannels == 1 && skip == 1) { if (numChannels == 1 && skip == 1) {
period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod); period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
} else { } else {
...@@ -388,8 +367,8 @@ import java.util.Arrays; ...@@ -388,8 +367,8 @@ import java.util.Arrays;
if (numOutputSamples == originalNumOutputSamples) { if (numOutputSamples == originalNumOutputSamples) {
return; return;
} }
int newSampleRate = (int) (sampleRate / rate); int newSampleRate = (int) (inputSampleRateHz / rate);
int oldSampleRate = sampleRate; int oldSampleRate = inputSampleRateHz;
// Set these values to help with the integer math. // Set these values to help with the integer math.
while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) { while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
newSampleRate /= 2; newSampleRate /= 2;
...@@ -476,6 +455,7 @@ import java.util.Arrays; ...@@ -476,6 +455,7 @@ import java.util.Arrays;
// Resample as many pitch periods as we have buffered on the input. // Resample as many pitch periods as we have buffered on the input.
int originalNumOutputSamples = numOutputSamples; int originalNumOutputSamples = numOutputSamples;
float s = speed / pitch; float s = speed / pitch;
float r = rate * pitch;
if (s > 1.00001 || s < 0.99999) { if (s > 1.00001 || s < 0.99999) {
changeSpeed(s); changeSpeed(s);
} else { } else {
...@@ -486,8 +466,8 @@ import java.util.Arrays; ...@@ -486,8 +466,8 @@ import java.util.Arrays;
if (pitch != 1.0f) { if (pitch != 1.0f) {
adjustPitch(originalNumOutputSamples); adjustPitch(originalNumOutputSamples);
} }
} else if (!USE_CHORD_PITCH && pitch != 1.0f) { } else if (r != 1.0f) {
adjustRate(pitch, originalNumOutputSamples); adjustRate(r, originalNumOutputSamples);
} }
} }
......
...@@ -24,7 +24,7 @@ import java.nio.ByteOrder; ...@@ -24,7 +24,7 @@ import java.nio.ByteOrder;
import java.nio.ShortBuffer; import java.nio.ShortBuffer;
/** /**
* An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio. * An {@link AudioProcessor} that uses the Sonic library to modify audio speed/pitch/sample rate.
*/ */
public final class SonicAudioProcessor implements AudioProcessor { public final class SonicAudioProcessor implements AudioProcessor {
...@@ -44,18 +44,30 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -44,18 +44,30 @@ public final class SonicAudioProcessor implements AudioProcessor {
* The minimum allowed pitch in {@link #setPitch(float)}. * The minimum allowed pitch in {@link #setPitch(float)}.
*/ */
public static final float MINIMUM_PITCH = 0.1f; public static final float MINIMUM_PITCH = 0.1f;
/**
* Indicates that the output sample rate should be the same as the input.
*/
public static final int SAMPLE_RATE_NO_CHANGE = -1;
/** /**
* The threshold below which the difference between two pitch/speed factors is negligible. * The threshold below which the difference between two pitch/speed factors is negligible.
*/ */
private static final float CLOSE_THRESHOLD = 0.01f; private static final float CLOSE_THRESHOLD = 0.01f;
/**
* The minimum number of output bytes at which the speedup is calculated using the input/output
* byte counts, rather than using the current playback parameters speed.
*/
private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
private int pendingOutputSampleRateHz;
private int channelCount; private int channelCount;
private int sampleRateHz; private int sampleRateHz;
private Sonic sonic; private Sonic sonic;
private float speed; private float speed;
private float pitch; private float pitch;
private int outputSampleRateHz;
private ByteBuffer buffer; private ByteBuffer buffer;
private ShortBuffer shortBuffer; private ShortBuffer shortBuffer;
...@@ -72,9 +84,11 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -72,9 +84,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
pitch = 1f; pitch = 1f;
channelCount = Format.NO_VALUE; channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE; sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer(); shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
} }
/** /**
...@@ -100,17 +114,34 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -100,17 +114,34 @@ public final class SonicAudioProcessor implements AudioProcessor {
} }
/** /**
* Returns the number of bytes of input queued since the last call to {@link #flush()}. * Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
* audio at the same sample rate as the input. After calling this method, call
* {@link #configure(int, int, int)} to start using the new sample rate.
*
* @param sampleRateHz The sample rate for output audio, in hertz.
* @see #configure(int, int, int)
*/ */
public long getInputByteCount() { public void setOutputSampleRateHz(int sampleRateHz) {
return inputBytes; pendingOutputSampleRateHz = sampleRateHz;
} }
/** /**
* Returns the number of bytes of output dequeued since the last call to {@link #flush()}. * Returns the specified duration scaled to take into account the speedup factor of this instance,
* in the same units as {@code duration}.
*
* @param duration The duration to scale taking into account speedup.
* @return The specified duration scaled to take into account speedup, in the same units as
* {@code duration}.
*/ */
public long getOutputByteCount() { public long scaleDurationForSpeedup(long duration) {
return outputBytes; if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
return outputSampleRateHz == sampleRateHz
? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
: Util.scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz,
outputBytes * sampleRateHz);
} else {
return (long) ((double) speed * duration);
}
} }
@Override @Override
...@@ -119,17 +150,22 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -119,17 +150,22 @@ public final class SonicAudioProcessor implements AudioProcessor {
if (encoding != C.ENCODING_PCM_16BIT) { if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
} }
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) { int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE
? sampleRateHz : pendingOutputSampleRateHz;
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
&& this.outputSampleRateHz == outputSampleRateHz) {
return false; return false;
} }
this.sampleRateHz = sampleRateHz; this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount; this.channelCount = channelCount;
this.outputSampleRateHz = outputSampleRateHz;
return true; return true;
} }
@Override @Override
public boolean isActive() { public boolean isActive() {
return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD; return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
|| outputSampleRateHz != sampleRateHz;
} }
@Override @Override
...@@ -143,6 +179,11 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -143,6 +179,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
} }
@Override @Override
public int getOutputSampleRateHz() {
return outputSampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) { public void queueInput(ByteBuffer inputBuffer) {
if (inputBuffer.hasRemaining()) { if (inputBuffer.hasRemaining()) {
ShortBuffer shortBuffer = inputBuffer.asShortBuffer(); ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
...@@ -187,9 +228,7 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -187,9 +228,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override @Override
public void flush() { public void flush() {
sonic = new Sonic(sampleRateHz, channelCount); sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
sonic.setSpeed(speed);
sonic.setPitch(pitch);
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
inputBytes = 0; inputBytes = 0;
outputBytes = 0; outputBytes = 0;
...@@ -204,9 +243,11 @@ public final class SonicAudioProcessor implements AudioProcessor { ...@@ -204,9 +243,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE; channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE; sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
inputBytes = 0; inputBytes = 0;
outputBytes = 0; outputBytes = 0;
inputEnded = false; inputEnded = false;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
} }
} }
...@@ -31,6 +31,7 @@ import java.nio.ByteOrder; ...@@ -31,6 +31,7 @@ import java.nio.ByteOrder;
private int trimStartSamples; private int trimStartSamples;
private int trimEndSamples; private int trimEndSamples;
private int channelCount; private int channelCount;
private int sampleRateHz;
private int pendingTrimStartBytes; private int pendingTrimStartBytes;
private ByteBuffer buffer; private ByteBuffer buffer;
...@@ -69,6 +70,7 @@ import java.nio.ByteOrder; ...@@ -69,6 +70,7 @@ import java.nio.ByteOrder;
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
} }
this.channelCount = channelCount; this.channelCount = channelCount;
this.sampleRateHz = sampleRateHz;
endBuffer = new byte[trimEndSamples * channelCount * 2]; endBuffer = new byte[trimEndSamples * channelCount * 2];
endBufferSize = 0; endBufferSize = 0;
pendingTrimStartBytes = trimStartSamples * channelCount * 2; pendingTrimStartBytes = trimStartSamples * channelCount * 2;
...@@ -93,6 +95,11 @@ import java.nio.ByteOrder; ...@@ -93,6 +95,11 @@ import java.nio.ByteOrder;
} }
@Override @Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) { public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position(); int position = inputBuffer.position();
int limit = inputBuffer.limit(); int limit = inputBuffer.limit();
...@@ -174,6 +181,7 @@ import java.nio.ByteOrder; ...@@ -174,6 +181,7 @@ import java.nio.ByteOrder;
flush(); flush();
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE; channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
endBuffer = null; endBuffer = null;
} }
......
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.android.exoplayer2.C;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
/**
 * Unit test for {@link SonicAudioProcessor}.
 */
@RunWith(RobolectricTestRunner.class)
@Config(sdk = Config.TARGET_SDK, manifest = Config.NONE)
public final class SonicAudioProcessorTest {

  private SonicAudioProcessor processor;

  @Before
  public void setUp() {
    processor = new SonicAudioProcessor();
  }

  @Test
  public void testReconfigureWithSameSampleRate() throws Exception {
    // Resampling 44.1 kHz input to 48 kHz makes the processor active with the requested rate.
    processor.setOutputSampleRateHz(48000);
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(processor.isActive()).isTrue();
    // With 48 kHz input the requested output rate already matches, so no resampling is needed.
    processor.configure(48000, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(processor.isActive()).isFalse();
    // Switching back to 44.1 kHz input re-enables resampling.
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(processor.isActive()).isTrue();
  }

  @Test
  public void testNoSampleRateChange() throws Exception {
    // Start off resampling 44.1 kHz to 48 kHz.
    processor.setOutputSampleRateHz(48000);
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // Ask for pass-through of the input sample rate.
    processor.setOutputSampleRateHz(SonicAudioProcessor.SAMPLE_RATE_NO_CHANGE);
    processor.configure(22050, 2, C.ENCODING_PCM_16BIT);
    // The output rate now tracks the input and the processor is inactive.
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(22050);
    assertThat(processor.isActive()).isFalse();
  }

  @Test
  public void testBecomesActiveAfterConfigure() throws Exception {
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // Request a different output sample rate.
    processor.setOutputSampleRateHz(22050);
    // Until configure is called again, the pending rate has no effect.
    assertThat(processor.isActive()).isFalse();
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(44100);
  }

  @Test
  public void testSampleRateChangeBecomesActiveAfterConfigure() throws Exception {
    // Start off resampling 44.1 kHz to 48 kHz.
    processor.setOutputSampleRateHz(48000);
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // A newly requested output rate stays pending...
    processor.setOutputSampleRateHz(22050);
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(48000);
    // ...until the next call to configure.
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.getOutputSampleRateHz()).isEqualTo(22050);
  }

  @Test
  public void testIsActiveWithSpeedChange() throws Exception {
    processor.setSpeed(1.5f);
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.isActive()).isTrue();
  }

  @Test
  public void testIsActiveWithPitchChange() throws Exception {
    processor.setPitch(1.5f);
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.isActive()).isTrue();
  }

  @Test
  public void testIsNotActiveWithNoChange() throws Exception {
    processor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(processor.isActive()).isFalse();
  }

  @Test
  public void testDoesNotSupportNon16BitInput() throws Exception {
    assertConfigureFails(C.ENCODING_PCM_8BIT);
    assertConfigureFails(C.ENCODING_PCM_24BIT);
    assertConfigureFails(C.ENCODING_PCM_32BIT);
  }

  /** Asserts that configuring 44.1 kHz stereo input with the given encoding is rejected. */
  private void assertConfigureFails(int encoding) {
    try {
      processor.configure(44100, 2, encoding);
      fail();
    } catch (AudioProcessor.UnhandledFormatException e) {
      // Expected.
    }
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment