Commit 682987a0 by andrewlewis Committed by Oliver Woodman

Separate input/output handling in BufferProcessors.

This allows BufferProcessors to partially and/or asynchronously handle
input/output. Document contract for queueInput and getOutput.

Update ResamplingBufferProcessor to use the new interface.

Separate the accounting of submitted bytes from the writing of data to the AudioTrack.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=148212269
parent 89655088
...@@ -31,6 +31,7 @@ import com.google.android.exoplayer2.util.Util; ...@@ -31,6 +31,7 @@ import com.google.android.exoplayer2.util.Util;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.util.ArrayList;
/** /**
* Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles * Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
...@@ -269,7 +270,7 @@ public final class AudioTrack { ...@@ -269,7 +270,7 @@ public final class AudioTrack {
public static boolean failOnSpuriousAudioTimestamp = false; public static boolean failOnSpuriousAudioTimestamp = false;
private final AudioCapabilities audioCapabilities; private final AudioCapabilities audioCapabilities;
private final BufferProcessor[] bufferProcessors; private final BufferProcessor[] availableBufferProcessors;
private final Listener listener; private final Listener listener;
private final ConditionVariable releasingConditionVariable; private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets; private final long[] playheadOffsets;
...@@ -290,7 +291,6 @@ public final class AudioTrack { ...@@ -290,7 +291,6 @@ public final class AudioTrack {
@C.StreamType @C.StreamType
private int streamType; private int streamType;
private boolean passthrough; private boolean passthrough;
private int pcmFrameSize;
private int bufferSize; private int bufferSize;
private long bufferSizeUs; private long bufferSizeUs;
...@@ -305,8 +305,12 @@ public final class AudioTrack { ...@@ -305,8 +305,12 @@ public final class AudioTrack {
private long lastTimestampSampleTimeUs; private long lastTimestampSampleTimeUs;
private Method getLatencyMethod; private Method getLatencyMethod;
private int pcmFrameSize;
private long submittedPcmBytes; private long submittedPcmBytes;
private long submittedEncodedFrames; private long submittedEncodedFrames;
private int outputPcmFrameSize;
private long writtenPcmBytes;
private long writtenEncodedFrames;
private int framesPerEncodedSample; private int framesPerEncodedSample;
private int startMediaTimeState; private int startMediaTimeState;
private long startMediaTimeUs; private long startMediaTimeUs;
...@@ -314,6 +318,8 @@ public final class AudioTrack { ...@@ -314,6 +318,8 @@ public final class AudioTrack {
private long latencyUs; private long latencyUs;
private float volume; private float volume;
private BufferProcessor[] bufferProcessors;
private ByteBuffer[] outputBuffers;
private ByteBuffer inputBuffer; private ByteBuffer inputBuffer;
private ByteBuffer outputBuffer; private ByteBuffer outputBuffer;
private byte[] preV21OutputBuffer; private byte[] preV21OutputBuffer;
...@@ -335,9 +341,9 @@ public final class AudioTrack { ...@@ -335,9 +341,9 @@ public final class AudioTrack {
public AudioTrack(AudioCapabilities audioCapabilities, BufferProcessor[] bufferProcessors, public AudioTrack(AudioCapabilities audioCapabilities, BufferProcessor[] bufferProcessors,
Listener listener) { Listener listener) {
this.audioCapabilities = audioCapabilities; this.audioCapabilities = audioCapabilities;
this.bufferProcessors = new BufferProcessor[bufferProcessors.length + 1]; availableBufferProcessors = new BufferProcessor[bufferProcessors.length + 1];
this.bufferProcessors[0] = new ResamplingBufferProcessor(); availableBufferProcessors[0] = new ResamplingBufferProcessor();
System.arraycopy(bufferProcessors, 0, this.bufferProcessors, 1, bufferProcessors.length); System.arraycopy(bufferProcessors, 0, availableBufferProcessors, 1, bufferProcessors.length);
this.listener = listener; this.listener = listener;
releasingConditionVariable = new ConditionVariable(true); releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) { if (Util.SDK_INT >= 18) {
...@@ -360,6 +366,8 @@ public final class AudioTrack { ...@@ -360,6 +366,8 @@ public final class AudioTrack {
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT; streamType = C.STREAM_TYPE_DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET; audioSessionId = C.AUDIO_SESSION_ID_UNSET;
this.bufferProcessors = new BufferProcessor[0];
outputBuffers = new ByteBuffer[0];
} }
/** /**
...@@ -440,14 +448,39 @@ public final class AudioTrack { ...@@ -440,14 +448,39 @@ public final class AudioTrack {
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize) throws ConfigurationException { @C.PcmEncoding int pcmEncoding, int specifiedBufferSize) throws ConfigurationException {
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType); boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
@C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding; @C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding;
boolean flush = false;
if (!passthrough) { if (!passthrough) {
for (BufferProcessor bufferProcessor : bufferProcessors) { pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
// Reconfigure the buffer processors.
ArrayList<BufferProcessor> newBufferProcessors = new ArrayList<>();
for (BufferProcessor bufferProcessor : availableBufferProcessors) {
boolean wasActive = bufferProcessor.isActive();
try { try {
bufferProcessor.configure(sampleRate, channelCount, encoding); flush |= bufferProcessor.configure(sampleRate, channelCount, encoding);
} catch (BufferProcessor.UnhandledFormatException e) { } catch (BufferProcessor.UnhandledFormatException e) {
throw new ConfigurationException(e); throw new ConfigurationException(e);
} }
encoding = bufferProcessor.getOutputEncoding(); boolean isActive = bufferProcessor.isActive();
flush |= isActive != wasActive;
if (isActive) {
newBufferProcessors.add(bufferProcessor);
channelCount = bufferProcessor.getOutputChannelCount();
encoding = bufferProcessor.getOutputEncoding();
} else {
bufferProcessor.flush();
}
}
if (flush) {
int count = newBufferProcessors.size();
bufferProcessors = newBufferProcessors.toArray(new BufferProcessor[count]);
outputBuffers = new ByteBuffer[count];
for (int i = 0; i < count; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i];
bufferProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput();
}
} }
} }
...@@ -502,7 +535,7 @@ public final class AudioTrack { ...@@ -502,7 +535,7 @@ public final class AudioTrack {
channelConfig = AudioFormat.CHANNEL_OUT_STEREO; channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
} }
if (isInitialized() && this.encoding == encoding && this.sampleRate == sampleRate if (!flush && isInitialized() && this.encoding == encoding && this.sampleRate == sampleRate
&& this.channelConfig == channelConfig) { && this.channelConfig == channelConfig) {
// We already have an audio track with the correct sample rate, channel config and encoding. // We already have an audio track with the correct sample rate, channel config and encoding.
return; return;
...@@ -514,8 +547,8 @@ public final class AudioTrack { ...@@ -514,8 +547,8 @@ public final class AudioTrack {
this.passthrough = passthrough; this.passthrough = passthrough;
this.sampleRate = sampleRate; this.sampleRate = sampleRate;
this.channelConfig = channelConfig; this.channelConfig = channelConfig;
pcmFrameSize = 2 * channelCount; // 2 bytes per 16-bit sample * number of channels.
outputEncoding = passthrough ? encoding : C.ENCODING_PCM_16BIT; outputEncoding = passthrough ? encoding : C.ENCODING_PCM_16BIT;
outputPcmFrameSize = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, channelCount);
if (specifiedBufferSize != 0) { if (specifiedBufferSize != 0) {
bufferSize = specifiedBufferSize; bufferSize = specifiedBufferSize;
...@@ -534,14 +567,14 @@ public final class AudioTrack { ...@@ -534,14 +567,14 @@ public final class AudioTrack {
android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding); android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
Assertions.checkState(minBufferSize != ERROR_BAD_VALUE); Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR; int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * pcmFrameSize; int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
int maxAppBufferSize = (int) Math.max(minBufferSize, int maxAppBufferSize = (int) Math.max(minBufferSize,
durationUsToFrames(MAX_BUFFER_DURATION_US) * pcmFrameSize); durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize
: multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize : multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize
: multipliedBufferSize; : multipliedBufferSize;
} }
bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(pcmBytesToFrames(bufferSize)); bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(bufferSize / outputPcmFrameSize);
} }
private void initialize() throws InitializationException { private void initialize() throws InitializationException {
...@@ -616,20 +649,19 @@ public final class AudioTrack { ...@@ -616,20 +649,19 @@ public final class AudioTrack {
} }
/** /**
* Attempts to write data from a {@link ByteBuffer} to the audio track, starting from its current * Attempts to process data from a {@link ByteBuffer}, starting from its current position and
* position and ending at its limit (exclusive). The position of the {@link ByteBuffer} is * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
* advanced by the number of bytes that were successfully written. * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
* {@link Listener#onPositionDiscontinuity()} will be called if {@code presentationTimeUs} is * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
* discontinuous with the last buffer handled since the track was reset.
* <p> * <p>
* Returns whether the data was written in full. If the data was not written in full then the same * Returns whether the data was handled in full. If the data was not handled in full then the same
* {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed, * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
* except in the case of an interleaving call to {@link #reset()} (or an interleaving call to * except in the case of an interleaving call to {@link #reset()} (or an interleaving call to
* {@link #configure(String, int, int, int, int)} that caused the track to be reset). * {@link #configure(String, int, int, int, int)} that caused the track to be reset).
* *
* @param buffer The buffer containing audio data to play back. * @param buffer The buffer containing audio data.
* @param presentationTimeUs Presentation timestamp of the next buffer in microseconds. * @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
* @return Whether the buffer was consumed fully. * @return Whether the buffer was handled fully.
* @throws InitializationException If an error occurs initializing the track. * @throws InitializationException If an error occurs initializing the track.
* @throws WriteException If an error occurs writing the audio data. * @throws WriteException If an error occurs writing the audio data.
*/ */
...@@ -703,53 +735,100 @@ public final class AudioTrack { ...@@ -703,53 +735,100 @@ public final class AudioTrack {
} }
} }
if (passthrough) {
submittedEncodedFrames += framesPerEncodedSample;
} else {
submittedPcmBytes += buffer.remaining();
}
inputBuffer = buffer; inputBuffer = buffer;
if (!passthrough) { }
for (BufferProcessor bufferProcessor : bufferProcessors) {
buffer = bufferProcessor.handleBuffer(buffer); if (passthrough) {
// Passthrough buffers are not processed.
writeBuffer(inputBuffer, presentationTimeUs);
} else {
processBuffers(presentationTimeUs);
}
if (!inputBuffer.hasRemaining()) {
inputBuffer = null;
return true;
}
return false;
}
/**
 * Drains pending audio through the chain of active buffer processors and writes the final
 * output to the underlying AudioTrack via {@code writeBuffer}.
 * <p>
 * {@code index} walks the chain: positions [0, count) queue input into the corresponding
 * processor, and position {@code count} writes to the AudioTrack. The index moves forward
 * while a processor has output to hand downstream, and moves back toward the caller's
 * {@code inputBuffer} once a stage's input is fully consumed.
 *
 * @param avSyncPresentationTimeUs The presentation timestamp in microseconds, used for A/V
 *     sync when writing to the AudioTrack in tunneling mode.
 * @throws WriteException If an error occurs writing to the AudioTrack.
 */
private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
int count = bufferProcessors.length;
// Start at the AudioTrack stage; work backwards for input, forwards for output.
int index = count;
while (index >= 0) {
// Stage i reads the output of processor i - 1; stage 0 reads the caller's input buffer.
ByteBuffer input = index > 0 ? outputBuffers[index - 1] : inputBuffer;
if (index == count) {
// Final stage: write directly to the AudioTrack (may be a partial write).
writeBuffer(input, avSyncPresentationTimeUs);
} else {
BufferProcessor bufferProcessor = bufferProcessors[index];
bufferProcessor.queueInput(input);
// Cache the processor's output so downstream stages can consume it incrementally.
ByteBuffer output = bufferProcessor.getOutput();
outputBuffers[index] = output;
if (output.hasRemaining()) {
// Handle the output as input to the next buffer processor or the AudioTrack.
index++;
continue;
}
}
if (input.hasRemaining()) {
// The input wasn't consumed and no output was produced, so give up for now.
return;
}
// Get more input from upstream.
index--;
}
}
@SuppressWarnings("ReferenceEquality")
private boolean writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs)
throws WriteException {
if (!buffer.hasRemaining()) {
return true;
}
if (outputBuffer != null) {
Assertions.checkArgument(outputBuffer == buffer);
} else {
outputBuffer = buffer; outputBuffer = buffer;
if (Util.SDK_INT < 21) { if (Util.SDK_INT < 21) {
int bytesRemaining = outputBuffer.remaining(); int bytesRemaining = buffer.remaining();
if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) { if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
preV21OutputBuffer = new byte[bytesRemaining]; preV21OutputBuffer = new byte[bytesRemaining];
} }
int originalPosition = outputBuffer.position(); int originalPosition = buffer.position();
outputBuffer.get(preV21OutputBuffer, 0, bytesRemaining); buffer.get(preV21OutputBuffer, 0, bytesRemaining);
outputBuffer.position(originalPosition); buffer.position(originalPosition);
preV21OutputBufferOffset = 0; preV21OutputBufferOffset = 0;
} }
} }
int bytesRemaining = buffer.remaining();
if (writeOutputBuffer(presentationTimeUs)) {
inputBuffer = null;
return true;
}
return false;
}
private boolean writeOutputBuffer(long presentationTimeUs) throws WriteException {
int bytesRemaining = outputBuffer.remaining();
int bytesWritten = 0; int bytesWritten = 0;
if (Util.SDK_INT < 21) { // passthrough == false if (Util.SDK_INT < 21) { // passthrough == false
// Work out how many bytes we can write without the risk of blocking. // Work out how many bytes we can write without the risk of blocking.
int bytesPending = int bytesPending =
(int) (submittedPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * pcmFrameSize)); (int) (writtenPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * outputPcmFrameSize));
int bytesToWrite = bufferSize - bytesPending; int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) { if (bytesToWrite > 0) {
bytesToWrite = Math.min(bytesRemaining, bytesToWrite); bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite); bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
if (bytesWritten > 0) { if (bytesWritten > 0) {
preV21OutputBufferOffset += bytesWritten; preV21OutputBufferOffset += bytesWritten;
outputBuffer.position(outputBuffer.position() + bytesWritten); buffer.position(buffer.position() + bytesWritten);
} }
} }
} else if (tunneling) { } else if (tunneling) {
bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, outputBuffer, bytesRemaining, Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
presentationTimeUs); bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, buffer, bytesRemaining,
avSyncPresentationTimeUs);
} else { } else {
bytesWritten = writeNonBlockingV21(audioTrack, outputBuffer, bytesRemaining); bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
} }
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime(); lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
...@@ -759,12 +838,13 @@ public final class AudioTrack { ...@@ -759,12 +838,13 @@ public final class AudioTrack {
} }
if (!passthrough) { if (!passthrough) {
submittedPcmBytes += bytesWritten; writtenPcmBytes += bytesWritten;
} }
if (bytesWritten == bytesRemaining) { if (bytesWritten == bytesRemaining) {
if (passthrough) { if (passthrough) {
submittedEncodedFrames += framesPerEncodedSample; writtenEncodedFrames += framesPerEncodedSample;
} }
outputBuffer = null;
return true; return true;
} }
return false; return false;
...@@ -775,7 +855,8 @@ public final class AudioTrack { ...@@ -775,7 +855,8 @@ public final class AudioTrack {
*/ */
public void handleEndOfStream() { public void handleEndOfStream() {
if (isInitialized()) { if (isInitialized()) {
audioTrackUtil.handleEndOfStream(getSubmittedFrames()); // TODO: Drain buffer processors before stopping the AudioTrack.
audioTrackUtil.handleEndOfStream(getWrittenFrames());
bytesUntilNextAvSync = 0; bytesUntilNextAvSync = 0;
} }
} }
...@@ -785,7 +866,7 @@ public final class AudioTrack { ...@@ -785,7 +866,7 @@ public final class AudioTrack {
*/ */
public boolean hasPendingData() { public boolean hasPendingData() {
return isInitialized() return isInitialized()
&& (getSubmittedFrames() > audioTrackUtil.getPlaybackHeadPosition() && (getWrittenFrames() > audioTrackUtil.getPlaybackHeadPosition()
|| overrideHasPendingData()); || overrideHasPendingData());
} }
...@@ -838,6 +919,11 @@ public final class AudioTrack { ...@@ -838,6 +919,11 @@ public final class AudioTrack {
/** /**
* Enables tunneling. The audio track is reset if tunneling was previously disabled or if the * Enables tunneling. The audio track is reset if tunneling was previously disabled or if the
* audio session id has changed. Enabling tunneling requires platform API version 21 onwards. * audio session id has changed. Enabling tunneling requires platform API version 21 onwards.
* <p>
* If this instance has {@link BufferProcessor}s and tunneling is enabled, care must be taken that
* buffer processors do not output buffers with a different duration than their input, and buffer
* processors must produce output corresponding to their last input immediately after that input
* is queued.
* *
* @param tunnelingAudioSessionId The audio session id to use. * @param tunnelingAudioSessionId The audio session id to use.
* @throws IllegalStateException Thrown if enabling tunneling on platform API version &lt; 21. * @throws IllegalStateException Thrown if enabling tunneling on platform API version &lt; 21.
...@@ -907,12 +993,17 @@ public final class AudioTrack { ...@@ -907,12 +993,17 @@ public final class AudioTrack {
if (isInitialized()) { if (isInitialized()) {
submittedPcmBytes = 0; submittedPcmBytes = 0;
submittedEncodedFrames = 0; submittedEncodedFrames = 0;
writtenPcmBytes = 0;
writtenEncodedFrames = 0;
framesPerEncodedSample = 0; framesPerEncodedSample = 0;
inputBuffer = null; inputBuffer = null;
avSyncHeader = null; outputBuffer = null;
for (BufferProcessor bufferProcessor : bufferProcessors) { for (int i = 0; i < bufferProcessors.length; i++) {
BufferProcessor bufferProcessor = bufferProcessors[i];
bufferProcessor.flush(); bufferProcessor.flush();
outputBuffers[i] = bufferProcessor.getOutput();
} }
avSyncHeader = null;
bytesUntilNextAvSync = 0; bytesUntilNextAvSync = 0;
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
latencyUs = 0; latencyUs = 0;
...@@ -946,7 +1037,7 @@ public final class AudioTrack { ...@@ -946,7 +1037,7 @@ public final class AudioTrack {
public void release() { public void release() {
reset(); reset();
releaseKeepSessionIdAudioTrack(); releaseKeepSessionIdAudioTrack();
for (BufferProcessor bufferProcessor : bufferProcessors) { for (BufferProcessor bufferProcessor : availableBufferProcessors) {
bufferProcessor.release(); bufferProcessor.release();
} }
audioSessionId = C.AUDIO_SESSION_ID_UNSET; audioSessionId = C.AUDIO_SESSION_ID_UNSET;
...@@ -1092,10 +1183,6 @@ public final class AudioTrack { ...@@ -1092,10 +1183,6 @@ public final class AudioTrack {
return audioTrack != null; return audioTrack != null;
} }
private long pcmBytesToFrames(long byteCount) {
return byteCount / pcmFrameSize;
}
private long framesToDurationUs(long frameCount) { private long framesToDurationUs(long frameCount) {
return (frameCount * C.MICROS_PER_SECOND) / sampleRate; return (frameCount * C.MICROS_PER_SECOND) / sampleRate;
} }
...@@ -1105,7 +1192,11 @@ public final class AudioTrack { ...@@ -1105,7 +1192,11 @@ public final class AudioTrack {
} }
private long getSubmittedFrames() { private long getSubmittedFrames() {
return passthrough ? submittedEncodedFrames : pcmBytesToFrames(submittedPcmBytes); return passthrough ? submittedEncodedFrames : (submittedPcmBytes / pcmFrameSize);
}
private long getWrittenFrames() {
return passthrough ? writtenEncodedFrames : (writtenPcmBytes / outputPcmFrameSize);
} }
private void resetSyncParams() { private void resetSyncParams() {
......
...@@ -17,6 +17,7 @@ package com.google.android.exoplayer2.audio; ...@@ -17,6 +17,7 @@ package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/** /**
* Interface for processors of audio buffers. * Interface for processors of audio buffers.
...@@ -36,30 +37,61 @@ public interface BufferProcessor { ...@@ -36,30 +37,61 @@ public interface BufferProcessor {
} }
/** /**
* Configures this processor to take input buffers with the specified format. * An empty, direct {@link ByteBuffer}.
*/
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/**
* Configures the processor to process input buffers with the specified format and returns whether
* the processor must be flushed. After calling this method, {@link #isActive()} returns whether
* the processor needs to handle buffers; if not, the processor will not accept any buffers until
* it is reconfigured. {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return
* the processor's output format.
* *
* @param sampleRateHz The sample rate of input audio in Hz. * @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio. * @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio. * @param encoding The encoding of input audio.
* @return Whether the processor must be flushed.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input. * @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
*/ */
void configure(int sampleRateHz, int channelCount, @C.Encoding int encoding) boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException; throws UnhandledFormatException;
/** /**
* Returns the encoding used in buffers output by this processor. * Returns whether the processor is configured and active.
*/
boolean isActive();
/**
* Returns the number of audio channels in the data output by the processor.
*/
int getOutputChannelCount();
/**
* Returns the audio encoding used in the data output by the processor.
*/ */
@C.Encoding @C.Encoding
int getOutputEncoding(); int getOutputEncoding();
/** /**
* Processes the data in the specified input buffer in its entirety. * Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
* read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
* The caller retains ownership of the provided buffer. Calling this method invalidates any
* previous buffer returned by {@link #getOutput()}.
*
* @param buffer The input buffer to process.
*/
void queueInput(ByteBuffer buffer);
/**
* Returns a buffer containing processed output data between its position and limit. The buffer
* will always be a direct byte buffer with native byte order. Calling this method invalidates any
* previously returned buffer. The buffer will be empty if no output is available.
* *
* @param input A buffer containing the input data to process. * @return A buffer containing processed output data between its position and limit.
* @return A buffer containing the processed output. This may be the same as the input buffer if
* no processing was required.
*/ */
ByteBuffer handleBuffer(ByteBuffer input); ByteBuffer getOutput();
/** /**
* Clears any state in preparation for receiving a new stream of buffers. * Clears any state in preparation for receiving a new stream of buffers.
......
...@@ -18,31 +18,55 @@ package com.google.android.exoplayer2.audio; ...@@ -18,31 +18,55 @@ package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/** /**
* A {@link BufferProcessor} that outputs buffers in {@link C#ENCODING_PCM_16BIT}. * A {@link BufferProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/ */
/* package */ final class ResamplingBufferProcessor implements BufferProcessor { /* package */ final class ResamplingBufferProcessor implements BufferProcessor {
private int channelCount;
@C.PcmEncoding @C.PcmEncoding
private int encoding; private int encoding;
private ByteBuffer buffer;
private ByteBuffer outputBuffer; private ByteBuffer outputBuffer;
/**
* Creates a new buffer processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
*/
public ResamplingBufferProcessor() { public ResamplingBufferProcessor() {
encoding = C.ENCODING_INVALID; encoding = C.ENCODING_INVALID;
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
} }
@Override @Override
public void configure(int sampleRateHz, int channelCount, @C.Encoding int encoding) public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException { throws UnhandledFormatException {
if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) { && encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
} }
if (encoding == C.ENCODING_PCM_16BIT) { this.channelCount = channelCount;
outputBuffer = null; if (this.encoding == encoding) {
return false;
} }
this.encoding = encoding; this.encoding = encoding;
if (encoding == C.ENCODING_PCM_16BIT) {
buffer = EMPTY_BUFFER;
}
return true;
}
@Override
public boolean isActive() {
return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputChannelCount() {
return channelCount;
} }
@Override @Override
...@@ -51,16 +75,13 @@ import java.nio.ByteBuffer; ...@@ -51,16 +75,13 @@ import java.nio.ByteBuffer;
} }
@Override @Override
public ByteBuffer handleBuffer(ByteBuffer buffer) { public void queueInput(ByteBuffer inputBuffer) {
int position = buffer.position(); // Prepare the output buffer.
int limit = buffer.limit(); int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position; int size = limit - position;
int resampledSize; int resampledSize;
switch (encoding) { switch (encoding) {
case C.ENCODING_PCM_16BIT:
// No processing required.
return buffer;
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
resampledSize = size * 2; resampledSize = size * 2;
break; break;
...@@ -70,40 +91,39 @@ import java.nio.ByteBuffer; ...@@ -70,40 +91,39 @@ import java.nio.ByteBuffer;
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
resampledSize = size / 2; resampledSize = size / 2;
break; break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_INVALID: case C.ENCODING_INVALID:
case Format.NO_VALUE: case Format.NO_VALUE:
default: default:
// Never happens.
throw new IllegalStateException(); throw new IllegalStateException();
} }
if (buffer.capacity() < resampledSize) {
if (outputBuffer == null || outputBuffer.capacity() < resampledSize) { buffer = ByteBuffer.allocateDirect(resampledSize).order(ByteOrder.nativeOrder());
outputBuffer = ByteBuffer.allocateDirect(resampledSize).order(buffer.order());
} else { } else {
outputBuffer.clear(); buffer.clear();
} }
// Samples are little endian. // Resample the little endian input and update the input/output buffers.
switch (encoding) { switch (encoding) {
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
// 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up. // 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
for (int i = position; i < limit; i++) { for (int i = position; i < limit; i++) {
outputBuffer.put((byte) 0); buffer.put((byte) 0);
outputBuffer.put((byte) ((buffer.get(i) & 0xFF) - 128)); buffer.put((byte) ((inputBuffer.get(i) & 0xFF) - 128));
} }
break; break;
case C.ENCODING_PCM_24BIT: case C.ENCODING_PCM_24BIT:
// 24->16 bit resampling. Drop the least significant byte. // 24->16 bit resampling. Drop the least significant byte.
for (int i = position; i < limit; i += 3) { for (int i = position; i < limit; i += 3) {
outputBuffer.put(buffer.get(i + 1)); buffer.put(inputBuffer.get(i + 1));
outputBuffer.put(buffer.get(i + 2)); buffer.put(inputBuffer.get(i + 2));
} }
break; break;
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
// 32->16 bit resampling. Drop the two least significant bytes. // 32->16 bit resampling. Drop the two least significant bytes.
for (int i = position; i < limit; i += 4) { for (int i = position; i < limit; i += 4) {
outputBuffer.put(buffer.get(i + 2)); buffer.put(inputBuffer.get(i + 2));
outputBuffer.put(buffer.get(i + 3)); buffer.put(inputBuffer.get(i + 3));
} }
break; break;
case C.ENCODING_PCM_16BIT: case C.ENCODING_PCM_16BIT:
...@@ -113,19 +133,27 @@ import java.nio.ByteBuffer; ...@@ -113,19 +133,27 @@ import java.nio.ByteBuffer;
// Never happens. // Never happens.
throw new IllegalStateException(); throw new IllegalStateException();
} }
inputBuffer.position(inputBuffer.limit());
buffer.flip();
outputBuffer = buffer;
}
outputBuffer.flip(); @Override
public ByteBuffer getOutput() {
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer; return outputBuffer;
} }
@Override @Override
public void flush() { public void flush() {
// Do nothing. outputBuffer = EMPTY_BUFFER;
} }
@Override @Override
public void release() { public void release() {
outputBuffer = null; buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
} }
} }
...@@ -768,6 +768,28 @@ public final class Util { ...@@ -768,6 +768,28 @@ public final class Util {
} }
/** /**
* Returns the frame size for audio with {@code channelCount} channels in the specified encoding.
*
* @param pcmEncoding The encoding of the audio data.
* @param channelCount The channel count.
* @return The size of one audio frame in bytes.
*/
// Frame size is bytes-per-sample multiplied by the number of interleaved channels,
// since one audio frame carries exactly one sample for each channel.
public static int getPcmFrameSize(@C.PcmEncoding int pcmEncoding, int channelCount) {
int bytesPerSample;
switch (pcmEncoding) {
case C.ENCODING_PCM_8BIT:
bytesPerSample = 1;
break;
case C.ENCODING_PCM_16BIT:
bytesPerSample = 2;
break;
case C.ENCODING_PCM_24BIT:
bytesPerSample = 3;
break;
case C.ENCODING_PCM_32BIT:
bytesPerSample = 4;
break;
default:
// Not a PCM encoding this method knows how to size.
throw new IllegalArgumentException();
}
return channelCount * bytesPerSample;
}
/**
* Makes a best guess to infer the type from a file name. * Makes a best guess to infer the type from a file name.
* *
* @param fileName Name of the file. It can include the path of the file. * @param fileName Name of the file. It can include the path of the file.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment