Commit ac31dc7c by andrewlewis Committed by Oliver Woodman

Allow setting output sample rate in SonicAudioProcessor

This is not really useful with the DefaultAudioSink, but could be used in a
custom AudioSink when mixing audio from sources that have different sample
rates.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=172434482
parent e46a7600
......@@ -139,6 +139,11 @@ public final class GvrAudioProcessor implements AudioProcessor {
}
@Override
public int getOutputSampleRateHz() {
  // Returns the rate stored at configuration time — presumably the input rate, i.e. this
  // processor does not resample (TODO confirm against configure(), not visible here).
  return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer input) {
int position = input.position();
int readBytes = gvrAudioSurround.addInput(input, position, input.limit() - position);
......
......@@ -20,7 +20,15 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Interface for audio processors.
* Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate.
* <p>
* Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active.
* {@link #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()},
* {@link #isEnded()}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and
* {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call
* {@link #reset()} to reset the processor to its unconfigured state.
*/
public interface AudioProcessor {
......@@ -46,8 +54,9 @@ public interface AudioProcessor {
* method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
* processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
* processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
* result of the call. If it's active, {@link #getOutputChannelCount()} and
* {@link #getOutputEncoding()} return the processor's output format.
* result of the call. If it's active, {@link #getOutputSampleRateHz()},
* {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return the processor's output
* format.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
......@@ -65,17 +74,28 @@ public interface AudioProcessor {
boolean isActive();
/**
* Returns the number of audio channels in the data output by the processor.
* Returns the number of audio channels in the data output by the processor. The value may change
* as a result of calling {@link #configure(int, int, int)} and is undefined if the instance is
* not active.
*/
int getOutputChannelCount();
/**
* Returns the audio encoding used in the data output by the processor.
* Returns the audio encoding used in the data output by the processor. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
@C.Encoding
int getOutputEncoding();
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
int getOutputSampleRateHz();
/**
* Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
* read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
......@@ -116,7 +136,7 @@ public interface AudioProcessor {
void flush();
/**
* Resets the processor to its initial state.
* Resets the processor to its unconfigured state.
*/
void reset();
......
......@@ -186,12 +186,12 @@ public interface AudioSink {
/**
* Configures (or reconfigures) the sink.
*
* @param mimeType The MIME type of audio data provided in the input buffers.
* @param channelCount The number of channels.
* @param sampleRate The sample rate in Hz.
* @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
* {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
* {@link C#ENCODING_PCM_32BIT}.
* @param inputMimeType The MIME type of audio data provided in the input buffers.
* @param inputChannelCount The number of channels.
* @param inputSampleRate The sample rate in Hz.
* @param inputPcmEncoding For PCM formats, the encoding used. One of
* {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
* {@link C#ENCODING_PCM_32BIT}.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size.
* @param outputChannels A mapping from input to output channels that is applied to this sink's
......@@ -206,9 +206,9 @@ public interface AudioSink {
* {@link #configure(String, int, int, int, int, int[], int, int)}.
* @throws ConfigurationException If an error occurs configuring the sink.
*/
void configure(String mimeType, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding,
int specifiedBufferSize, @Nullable int[] outputChannels, int trimStartSamples,
int trimEndSamples) throws ConfigurationException;
void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
@C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
int trimStartSamples, int trimEndSamples) throws ConfigurationException;
/**
* Starts or resumes consuming audio if initialized.
......
......@@ -104,6 +104,11 @@ import java.util.Arrays;
}
@Override
public int getOutputSampleRateHz() {
  // Returns the rate stored at configuration time — presumably the input rate, i.e. this
  // processor does not resample (TODO confirm against configure(), not visible here).
  return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position();
int limit = inputBuffer.limit();
......
......@@ -149,13 +149,6 @@ public final class DefaultAudioSink implements AudioSink {
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
/**
* The minimum number of output bytes from {@link #sonicAudioProcessor} at which the speedup is
* calculated using the input/output byte counts from the processor, rather than using the
* current playback parameters speed.
*/
private static final int SONIC_MIN_BYTES_FOR_SPEEDUP = 1024;
/**
* Whether to enable a workaround for an issue where an audio effect does not keep its session
* active across releasing/initializing a new audio track, on platform builds where
* {@link Util#SDK_INT} &lt; 21.
......@@ -189,6 +182,7 @@ public final class DefaultAudioSink implements AudioSink {
*/
private AudioTrack keepSessionIdAudioTrack;
private AudioTrack audioTrack;
private int inputSampleRate;
private int sampleRate;
private int channelConfig;
private @C.Encoding int encoding;
......@@ -337,14 +331,18 @@ public final class DefaultAudioSink implements AudioSink {
}
@Override
public void configure(String mimeType, int channelCount, int sampleRate,
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
public void configure(String inputMimeType, int inputChannelCount, int inputSampleRate,
@C.PcmEncoding int inputPcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
int trimStartSamples, int trimEndSamples) throws ConfigurationException {
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
@C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding;
this.inputSampleRate = inputSampleRate;
int channelCount = inputChannelCount;
int sampleRate = inputSampleRate;
@C.Encoding int encoding;
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(inputMimeType);
boolean flush = false;
if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
encoding = inputPcmEncoding;
pcmFrameSize = Util.getPcmFrameSize(inputPcmEncoding, channelCount);
trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples);
channelMappingAudioProcessor.setChannelMap(outputChannels);
for (AudioProcessor audioProcessor : availableAudioProcessors) {
......@@ -355,12 +353,15 @@ public final class DefaultAudioSink implements AudioSink {
}
if (audioProcessor.isActive()) {
channelCount = audioProcessor.getOutputChannelCount();
sampleRate = audioProcessor.getOutputSampleRateHz();
encoding = audioProcessor.getOutputEncoding();
}
}
if (flush) {
resetAudioProcessors();
}
} else {
encoding = getEncodingForMimeType(inputMimeType);
}
int channelConfig;
......@@ -598,8 +599,8 @@ public final class DefaultAudioSink implements AudioSink {
startMediaTimeState = START_IN_SYNC;
} else {
// Sanity check that presentationTimeUs is consistent with the expected value.
long expectedPresentationTimeUs = startMediaTimeUs
+ framesToDurationUs(getSubmittedFrames());
long expectedPresentationTimeUs =
startMediaTimeUs + inputFramesToDurationUs(getSubmittedFrames());
if (startMediaTimeState == START_IN_SYNC
&& Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got "
......@@ -997,15 +998,11 @@ public final class DefaultAudioSink implements AudioSink {
return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
}
if (playbackParametersCheckpoints.isEmpty()
&& sonicAudioProcessor.getOutputByteCount() >= SONIC_MIN_BYTES_FOR_SPEEDUP) {
if (playbackParametersCheckpoints.isEmpty()) {
return playbackParametersOffsetUs
+ Util.scaleLargeTimestamp(positionUs - playbackParametersPositionUs,
sonicAudioProcessor.getInputByteCount(), sonicAudioProcessor.getOutputByteCount());
+ sonicAudioProcessor.scaleDurationForSpeedup(positionUs - playbackParametersPositionUs);
}
// We are playing drained data at a previous playback speed, or don't have enough bytes to
// calculate an accurate speedup, so fall back to multiplying by the speed.
// We are playing data at a previous playback speed, so fall back to multiplying by the speed.
return playbackParametersOffsetUs
+ (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs));
}
......@@ -1098,6 +1095,10 @@ public final class DefaultAudioSink implements AudioSink {
return audioTrack != null;
}
/**
 * Returns the duration in microseconds of {@code frameCount} frames at the input sample rate
 * ({@code inputSampleRate}, set in {@code configure}).
 */
private long inputFramesToDurationUs(long frameCount) {
  return (frameCount * C.MICROS_PER_SECOND) / inputSampleRate;
}
/**
 * Returns the duration in microseconds of {@code frameCount} frames at the current
 * {@code sampleRate} — presumably the output/track rate, which may differ from the input rate
 * when a processor resamples (TODO confirm where {@code sampleRate} is assigned).
 */
private long framesToDurationUs(long frameCount) {
  return (frameCount * C.MICROS_PER_SECOND) / sampleRate;
}
......
......@@ -80,6 +80,11 @@ import java.nio.ByteOrder;
}
@Override
public int getOutputSampleRateHz() {
  // Returns the rate stored at configuration time — presumably the input rate, i.e. this
  // processor does not resample (TODO confirm against configure(), not visible here).
  return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
// Prepare the output buffer.
int position = inputBuffer.position();
......
......@@ -32,8 +32,11 @@ import java.util.Arrays;
private static final int MAXIMUM_PITCH = 400;
private static final int AMDF_FREQUENCY = 4000;
private final int sampleRate;
private final int inputSampleRateHz;
private final int numChannels;
private final float speed;
private final float pitch;
private final float rate;
private final int minPeriod;
private final int maxPeriod;
private final int maxRequired;
......@@ -47,8 +50,6 @@ import java.util.Arrays;
private short[] pitchBuffer;
private int oldRatePosition;
private int newRatePosition;
private float speed;
private float pitch;
private int numInputSamples;
private int numOutputSamples;
private int numPitchSamples;
......@@ -61,14 +62,18 @@ import java.util.Arrays;
/**
* Creates a new Sonic audio stream processor.
*
* @param sampleRate The sample rate of input audio.
* @param inputSampleRateHz The sample rate of input audio, in hertz.
* @param numChannels The number of channels in the input audio.
* @param speed The speedup factor for output audio.
* @param pitch The pitch factor for output audio.
* @param outputSampleRateHz The sample rate for output audio, in hertz.
*/
public Sonic(int sampleRate, int numChannels) {
this.sampleRate = sampleRate;
public Sonic(int inputSampleRateHz, int numChannels, float speed, float pitch,
int outputSampleRateHz) {
this.inputSampleRateHz = inputSampleRateHz;
this.numChannels = numChannels;
minPeriod = sampleRate / MAXIMUM_PITCH;
maxPeriod = sampleRate / MINIMUM_PITCH;
minPeriod = inputSampleRateHz / MAXIMUM_PITCH;
maxPeriod = inputSampleRateHz / MINIMUM_PITCH;
maxRequired = 2 * maxPeriod;
downSampleBuffer = new short[maxRequired];
inputBufferSize = maxRequired;
......@@ -80,36 +85,9 @@ import java.util.Arrays;
oldRatePosition = 0;
newRatePosition = 0;
prevPeriod = 0;
speed = 1.0f;
pitch = 1.0f;
}
/**
* Sets the output speed.
*/
public void setSpeed(float speed) {
this.speed = speed;
}
/**
* Gets the output speed.
*/
public float getSpeed() {
return speed;
}
/**
* Sets the output pitch.
*/
public void setPitch(float pitch) {
this.pitch = pitch;
}
/**
* Gets the output pitch.
*/
public float getPitch() {
return pitch;
this.rate = (float) inputSampleRateHz / outputSampleRateHz;
}
/**
......@@ -148,8 +126,9 @@ import java.util.Arrays;
public void queueEndOfStream() {
int remainingSamples = numInputSamples;
float s = speed / pitch;
float r = rate * pitch;
int expectedOutputSamples =
numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / pitch + 0.5f);
numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / r + 0.5f);
// Add enough silence to flush both input and pitch buffers.
enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
......@@ -292,7 +271,7 @@ import java.util.Arrays;
// sampling.
int period;
int retPeriod;
int skip = sampleRate > AMDF_FREQUENCY ? sampleRate / AMDF_FREQUENCY : 1;
int skip = inputSampleRateHz > AMDF_FREQUENCY ? inputSampleRateHz / AMDF_FREQUENCY : 1;
if (numChannels == 1 && skip == 1) {
period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
} else {
......@@ -388,8 +367,8 @@ import java.util.Arrays;
if (numOutputSamples == originalNumOutputSamples) {
return;
}
int newSampleRate = (int) (sampleRate / rate);
int oldSampleRate = sampleRate;
int newSampleRate = (int) (inputSampleRateHz / rate);
int oldSampleRate = inputSampleRateHz;
// Set these values to help with the integer math.
while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
newSampleRate /= 2;
......@@ -476,6 +455,7 @@ import java.util.Arrays;
// Resample as many pitch periods as we have buffered on the input.
int originalNumOutputSamples = numOutputSamples;
float s = speed / pitch;
float r = rate * pitch;
if (s > 1.00001 || s < 0.99999) {
changeSpeed(s);
} else {
......@@ -486,8 +466,8 @@ import java.util.Arrays;
if (pitch != 1.0f) {
adjustPitch(originalNumOutputSamples);
}
} else if (!USE_CHORD_PITCH && pitch != 1.0f) {
adjustRate(pitch, originalNumOutputSamples);
} else if (r != 1.0f) {
adjustRate(r, originalNumOutputSamples);
}
}
......
......@@ -24,7 +24,7 @@ import java.nio.ByteOrder;
import java.nio.ShortBuffer;
/**
* An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio.
* An {@link AudioProcessor} that uses the Sonic library to modify audio speed/pitch/sample rate.
*/
public final class SonicAudioProcessor implements AudioProcessor {
......@@ -44,18 +44,30 @@ public final class SonicAudioProcessor implements AudioProcessor {
* The minimum allowed pitch in {@link #setPitch(float)}.
*/
public static final float MINIMUM_PITCH = 0.1f;
/**
* Indicates that the output sample rate should be the same as the input.
*/
public static final int SAMPLE_RATE_NO_CHANGE = -1;
/**
* The threshold below which the difference between two pitch/speed factors is negligible.
*/
private static final float CLOSE_THRESHOLD = 0.01f;
/**
* The minimum number of output bytes at which the speedup is calculated using the input/output
* byte counts, rather than using the current playback parameters speed.
*/
private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
private int pendingOutputSampleRateHz;
private int channelCount;
private int sampleRateHz;
private Sonic sonic;
private float speed;
private float pitch;
private int outputSampleRateHz;
private ByteBuffer buffer;
private ShortBuffer shortBuffer;
......@@ -72,9 +84,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
}
/**
......@@ -100,17 +114,34 @@ public final class SonicAudioProcessor implements AudioProcessor {
}
/**
* Returns the number of bytes of input queued since the last call to {@link #flush()}.
* Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
* audio at the same sample rate as the input. After calling this method, call
* {@link #configure(int, int, int)} to start using the new sample rate.
*
* @param sampleRateHz The sample rate for output audio, in hertz.
* @see #configure(int, int, int)
*/
public long getInputByteCount() {
return inputBytes;
public void setOutputSampleRateHz(int sampleRateHz) {
  // Stored as pending only; the value takes effect on the next call to configure(int, int, int),
  // which copies it into outputSampleRateHz.
  pendingOutputSampleRateHz = sampleRateHz;
}
/**
* Returns the number of bytes of output dequeued since the last call to {@link #flush()}.
* Returns the specified duration scaled to take into account the speedup factor of this instance,
* in the same units as {@code duration}.
*
* @param duration The duration to scale taking into account speedup.
* @return The specified duration scaled to take into account speedup, in the same units as
* {@code duration}.
*/
public long getOutputByteCount() {
return outputBytes;
/**
 * Scales {@code duration} to account for this instance's speedup, returning the result in the
 * same units as {@code duration}.
 *
 * @param duration The duration to scale taking into account speedup.
 * @return The scaled duration, in the same units as {@code duration}.
 */
public long scaleDurationForSpeedup(long duration) {
  if (outputBytes < MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
    // Too little output has been produced for the byte-ratio estimate to be accurate, so fall
    // back to the configured speed factor.
    return (long) ((double) speed * duration);
  }
  if (outputSampleRateHz == sampleRateHz) {
    // No resampling: the input/output byte counts directly give the effective speedup.
    return Util.scaleLargeTimestamp(duration, inputBytes, outputBytes);
  }
  // Resampling changes the byte rate, so normalize the byte counts by the sample rates.
  return Util.scaleLargeTimestamp(
      duration, inputBytes * outputSampleRateHz, outputBytes * sampleRateHz);
}
@Override
......@@ -119,17 +150,22 @@ public final class SonicAudioProcessor implements AudioProcessor {
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE
? sampleRateHz : pendingOutputSampleRateHz;
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
&& this.outputSampleRateHz == outputSampleRateHz) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.outputSampleRateHz = outputSampleRateHz;
return true;
}
@Override
public boolean isActive() {
return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD;
return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
|| outputSampleRateHz != sampleRateHz;
}
@Override
......@@ -143,6 +179,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
}
@Override
public int getOutputSampleRateHz() {
  // Set in configure() from pendingOutputSampleRateHz; may differ from the input rate when
  // setOutputSampleRateHz(int) has requested resampling.
  return outputSampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
if (inputBuffer.hasRemaining()) {
ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
......@@ -187,9 +228,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void flush() {
sonic = new Sonic(sampleRateHz, channelCount);
sonic.setSpeed(speed);
sonic.setPitch(pitch);
sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
outputBuffer = EMPTY_BUFFER;
inputBytes = 0;
outputBytes = 0;
......@@ -204,9 +243,11 @@ public final class SonicAudioProcessor implements AudioProcessor {
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputSampleRateHz = Format.NO_VALUE;
inputBytes = 0;
outputBytes = 0;
inputEnded = false;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
}
}
......@@ -31,6 +31,7 @@ import java.nio.ByteOrder;
private int trimStartSamples;
private int trimEndSamples;
private int channelCount;
private int sampleRateHz;
private int pendingTrimStartBytes;
private ByteBuffer buffer;
......@@ -69,6 +70,7 @@ import java.nio.ByteOrder;
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
this.channelCount = channelCount;
this.sampleRateHz = sampleRateHz;
endBuffer = new byte[trimEndSamples * channelCount * 2];
endBufferSize = 0;
pendingTrimStartBytes = trimStartSamples * channelCount * 2;
......@@ -93,6 +95,11 @@ import java.nio.ByteOrder;
}
@Override
public int getOutputSampleRateHz() {
  // Returns the rate stored in configure(); trimming does not change the sample rate.
  return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position();
int limit = inputBuffer.limit();
......@@ -174,6 +181,7 @@ import java.nio.ByteOrder;
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
endBuffer = null;
}
......
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.android.exoplayer2.C;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
/**
 * Unit test for {@link SonicAudioProcessor}.
 */
@RunWith(RobolectricTestRunner.class)
@Config(sdk = Config.TARGET_SDK, manifest = Config.NONE)
public final class SonicAudioProcessorTest {

  // Fresh instance per test, created in setUp().
  private SonicAudioProcessor sonicAudioProcessor;

  @Before
  public void setUp() {
    sonicAudioProcessor = new SonicAudioProcessor();
  }

  @Test
  public void testReconfigureWithSameSampleRate() throws Exception {
    // When configured for resampling from 44.1 kHz to 48 kHz, the output sample rate is correct.
    sonicAudioProcessor.setOutputSampleRateHz(48000);
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(sonicAudioProcessor.isActive()).isTrue();
    // When reconfigured with 48 kHz input, there is no resampling.
    sonicAudioProcessor.configure(48000, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(sonicAudioProcessor.isActive()).isFalse();
    // When reconfigured with 44.1 kHz input, resampling is enabled again.
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
    assertThat(sonicAudioProcessor.isActive()).isTrue();
  }

  @Test
  public void testNoSampleRateChange() throws Exception {
    // Configure for resampling 44.1 kHz to 48 kHz.
    sonicAudioProcessor.setOutputSampleRateHz(48000);
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // Reconfigure to not modify the sample rate.
    sonicAudioProcessor.setOutputSampleRateHz(SonicAudioProcessor.SAMPLE_RATE_NO_CHANGE);
    sonicAudioProcessor.configure(22050, 2, C.ENCODING_PCM_16BIT);
    // The sample rate is unmodified, and the audio processor is not active.
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(22050);
    assertThat(sonicAudioProcessor.isActive()).isFalse();
  }

  @Test
  public void testBecomesActiveAfterConfigure() throws Exception {
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // Set a new sample rate.
    sonicAudioProcessor.setOutputSampleRateHz(22050);
    // The new sample rate is not active yet: it only takes effect on the next configure call.
    assertThat(sonicAudioProcessor.isActive()).isFalse();
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(44100);
  }

  @Test
  public void testSampleRateChangeBecomesActiveAfterConfigure() throws Exception {
    // Configure for resampling 44.1 kHz to 48 kHz.
    sonicAudioProcessor.setOutputSampleRateHz(48000);
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    // Set a new sample rate, which isn't active yet.
    sonicAudioProcessor.setOutputSampleRateHz(22050);
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(48000);
    // The new sample rate takes effect on reconfiguration.
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.getOutputSampleRateHz()).isEqualTo(22050);
  }

  @Test
  public void testIsActiveWithSpeedChange() throws Exception {
    sonicAudioProcessor.setSpeed(1.5f);
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.isActive()).isTrue();
  }

  @Test
  public void testIsActiveWithPitchChange() throws Exception {
    sonicAudioProcessor.setPitch(1.5f);
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.isActive()).isTrue();
  }

  @Test
  public void testIsNotActiveWithNoChange() throws Exception {
    // Default speed/pitch and no output sample rate override: the processor is a no-op.
    sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_16BIT);
    assertThat(sonicAudioProcessor.isActive()).isFalse();
  }

  @Test
  public void testDoesNotSupportNon16BitInput() throws Exception {
    // Only ENCODING_PCM_16BIT is supported; every other PCM encoding must be rejected.
    try {
      sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_8BIT);
      fail();
    } catch (AudioProcessor.UnhandledFormatException e) {
      // Expected.
    }
    try {
      sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_24BIT);
      fail();
    } catch (AudioProcessor.UnhandledFormatException e) {
      // Expected.
    }
    try {
      sonicAudioProcessor.configure(44100, 2, C.ENCODING_PCM_32BIT);
      fail();
    } catch (AudioProcessor.UnhandledFormatException e) {
      // Expected.
    }
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment