Commit 872cbec9 by andrewlewis Committed by Oliver Woodman

Add TrimmingAudioProcessor for gapless

Remove gapless functionality that relies on MediaCodec, and implement this in
an AudioProcessor instead.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=168547487
parent c9591d76
......@@ -529,8 +529,6 @@ public final class Format implements Parcelable {
maybeSetIntegerV16(format, "rotation-degrees", rotationDegrees);
maybeSetIntegerV16(format, MediaFormat.KEY_CHANNEL_COUNT, channelCount);
maybeSetIntegerV16(format, MediaFormat.KEY_SAMPLE_RATE, sampleRate);
maybeSetIntegerV16(format, "encoder-delay", encoderDelay);
maybeSetIntegerV16(format, "encoder-padding", encoderPadding);
for (int i = 0; i < initializationData.size(); i++) {
format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
}
......
......@@ -40,21 +40,23 @@ import java.util.LinkedList;
* playback position smoothing, non-blocking writes and reconfiguration.
* <p>
* Before starting playback, specify the input format by calling
* {@link #configure(String, int, int, int, int)}. Optionally call {@link #setAudioSessionId(int)},
* {@link #setAudioAttributes(AudioAttributes)}, {@link #enableTunnelingV21(int)} and
* {@link #disableTunneling()} to configure audio playback. These methods may be called after
* writing data to the track, in which case it will be reinitialized as required.
* {@link #configure(String, int, int, int, int, int[], int, int)}. Optionally call
* {@link #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)},
* {@link #enableTunnelingV21(int)} and {@link #disableTunneling()} to configure audio playback.
* These methods may be called after writing data to the track, in which case it will be
* reinitialized as required.
* <p>
* Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
* when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
* <p>
* Call {@link #configure(String, int, int, int, int)} whenever the input format changes. The track
* will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
* Call {@link #configure(String, int, int, int, int, int[], int, int)} whenever the input format
* changes. The track will be reinitialized on the next call to
* {@link #handleBuffer(ByteBuffer, long)}.
* <p>
* Calling {@link #reset()} releases the underlying {@link android.media.AudioTrack} (and so does
* calling {@link #configure(String, int, int, int, int)} unless the format is unchanged). It is
* safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()} without calling
* {@link #configure(String, int, int, int, int)}.
* calling {@link #configure(String, int, int, int, int, int[], int, int)} unless the format is
* unchanged). It is safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()}
* without calling {@link #configure(String, int, int, int, int, int[], int, int)}.
* <p>
* Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will
* be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset}. Call
......@@ -280,6 +282,7 @@ public final class AudioTrack {
@Nullable private final AudioCapabilities audioCapabilities;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final TrimmingAudioProcessor trimmingAudioProcessor;
private final SonicAudioProcessor sonicAudioProcessor;
private final AudioProcessor[] availableAudioProcessors;
private final Listener listener;
......@@ -375,12 +378,14 @@ public final class AudioTrack {
audioTrackUtil = new AudioTrackUtil();
}
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
trimmingAudioProcessor = new TrimmingAudioProcessor();
sonicAudioProcessor = new SonicAudioProcessor();
availableAudioProcessors = new AudioProcessor[3 + audioProcessors.length];
availableAudioProcessors = new AudioProcessor[4 + audioProcessors.length];
availableAudioProcessors[0] = new ResamplingAudioProcessor();
availableAudioProcessors[1] = channelMappingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
availableAudioProcessors[2 + audioProcessors.length] = sonicAudioProcessor;
availableAudioProcessors[2] = trimmingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 3, audioProcessors.length);
availableAudioProcessors[3 + audioProcessors.length] = sonicAudioProcessor;
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
volume = 1.0f;
startMediaTimeState = START_NOT_SET;
......@@ -461,39 +466,27 @@ public final class AudioTrack {
* {@link C#ENCODING_PCM_32BIT}.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size automatically.
* @throws ConfigurationException If an error occurs configuring the track.
*/
public void configure(String mimeType, int channelCount, int sampleRate,
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize) throws ConfigurationException {
configure(mimeType, channelCount, sampleRate, pcmEncoding, specifiedBufferSize, null);
}
/**
* Configures (or reconfigures) the audio track.
*
* @param mimeType The mime type.
* @param channelCount The number of channels.
* @param sampleRate The sample rate in Hz.
* @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_8BIT},
* {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
* {@link C#ENCODING_PCM_32BIT}.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size automatically.
* @param outputChannels A mapping from input to output channels that is applied to this track's
* input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the
* input unchanged. Otherwise, the element at index {@code i} specifies the index of the input
* channel to map to output channel {@code i} when preprocessing input buffers. After the
* map is applied the audio data will have {@code outputChannels.length} channels.
* @param trimStartSamples The number of audio samples to trim from the start of data written to
* the track after this call.
* @param trimEndSamples The number of audio samples to trim from data written to the track
* immediately preceding the next call to {@link #reset()} or
* {@link #configure(String, int, int, int, int, int[], int, int)}.
* @throws ConfigurationException If an error occurs configuring the track.
*/
public void configure(String mimeType, int channelCount, int sampleRate,
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize, int[] outputChannels)
throws ConfigurationException {
@C.PcmEncoding int pcmEncoding, int specifiedBufferSize, @Nullable int[] outputChannels,
int trimStartSamples, int trimEndSamples) throws ConfigurationException {
boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
@C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding;
boolean flush = false;
if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples);
channelMappingAudioProcessor.setChannelMap(outputChannels);
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
......@@ -689,7 +682,8 @@ public final class AudioTrack {
* Returns whether the data was handled in full. If the data was not handled in full then the same
* {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
* except in the case of an interleaving call to {@link #reset()} (or an interleaving call to
* {@link #configure(String, int, int, int, int)} that caused the track to be reset).
* {@link #configure(String, int, int, int, int, int[], int, int)} that caused the track to be
* reset).
*
* @param buffer The buffer containing audio data.
* @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
......
......@@ -52,7 +52,7 @@ import java.util.Arrays;
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
* to start using the new channel map.
*
* @see AudioTrack#configure(String, int, int, int, int, int[])
* @see AudioTrack#configure(String, int, int, int, int, int[], int, int)
*/
public void setChannelMap(int[] outputChannels) {
pendingOutputChannels = outputChannels;
......
......@@ -53,6 +53,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private android.media.MediaFormat passthroughMediaFormat;
private int pcmEncoding;
private int channelCount;
private int encoderDelay;
private int encoderPadding;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
......@@ -134,8 +136,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Nullable AudioRendererEventListener eventListener,
@Nullable AudioCapabilities audioCapabilities, AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
}
@Override
......@@ -240,6 +242,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
channelCount = newFormat.channelCount;
encoderDelay = newFormat.encoderDelay != Format.NO_VALUE ? newFormat.encoderDelay : 0;
encoderPadding = newFormat.encoderPadding != Format.NO_VALUE ? newFormat.encoderPadding : 0;
}
@Override
......@@ -262,7 +266,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
try {
audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap);
audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap,
encoderDelay, encoderPadding);
} catch (AudioTrack.ConfigurationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
......
......@@ -78,6 +78,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private DecoderCounters decoderCounters;
private Format inputFormat;
private int encoderDelay;
private int encoderPadding;
private SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer,
? extends AudioDecoderException> decoder;
private DecoderInputBuffer inputBuffer;
......@@ -308,7 +310,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (audioTrackNeedsConfigure) {
Format outputFormat = getOutputFormat();
audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
outputFormat.sampleRate, outputFormat.pcmEncoding, 0, null, encoderDelay, encoderPadding);
audioTrackNeedsConfigure = false;
}
......@@ -587,6 +589,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
audioTrackNeedsConfigure = true;
}
encoderDelay = newFormat.encoderDelay == Format.NO_VALUE ? 0 : newFormat.encoderDelay;
encoderPadding = newFormat.encoderPadding == Format.NO_VALUE ? 0 : newFormat.encoderPadding;
eventDispatcher.inputFormatChanged(newFormat);
}
......
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Audio processor for trimming samples from the start/end of 16-bit PCM audio data.
 * <p>
 * Set the trim counts via {@link #setTrimSampleCount(int, int)}, then call
 * {@link #configure(int, int, int)} to apply them. Start trimming works by dropping bytes from the
 * front of queued input. End trimming works by always withholding the most recent
 * {@code trimEndSamples} frames of input in an internal buffer, so that they are discarded if no
 * more input arrives before the end of the stream.
 */
/* package */ final class TrimmingAudioProcessor implements AudioProcessor {

  // Whether this processor needs to modify the stream (i.e. either trim count is non-zero).
  private boolean isActive;
  // Requested number of frames to trim from the start/end of the stream.
  private int trimStartSamples;
  private int trimEndSamples;
  // Channel count of the configured input, or Format.NO_VALUE if not yet configured.
  private int channelCount;
  // Number of bytes still to be dropped from the front of queued input.
  private int pendingTrimStartBytes;
  // Reusable direct buffer into which output is written.
  private ByteBuffer buffer;
  // Output pending consumption via getOutput(), or EMPTY_BUFFER if there is none.
  private ByteBuffer outputBuffer;
  // Holds the most recently queued trimEndSamples frames; these bytes are never output until newer
  // input displaces them, so they are dropped if the stream ends.
  private byte[] endBuffer;
  // Number of valid bytes at the start of endBuffer.
  private int endBufferSize;
  // Whether queueEndOfStream() has been called without a subsequent flush().
  private boolean inputEnded;

  /**
   * Creates a new audio processor for trimming samples from the start/end of data.
   */
  public TrimmingAudioProcessor() {
    buffer = EMPTY_BUFFER;
    outputBuffer = EMPTY_BUFFER;
    channelCount = Format.NO_VALUE;
  }

  /**
   * Sets the number of audio samples to trim from the start and end of audio passed to this
   * processor. After calling this method, call {@link #configure(int, int, int)} to apply the new
   * trimming sample counts.
   *
   * @param trimStartSamples The number of audio samples to trim from the start of audio.
   * @param trimEndSamples The number of audio samples to trim from the end of audio.
   * @see AudioTrack#configure(String, int, int, int, int, int[], int, int)
   */
  public void setTrimSampleCount(int trimStartSamples, int trimEndSamples) {
    this.trimStartSamples = trimStartSamples;
    this.trimEndSamples = trimEndSamples;
  }

  @Override
  public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
      throws UnhandledFormatException {
    // Only 16-bit PCM is supported, so each sample is (channelCount * 2) bytes below.
    if (encoding != C.ENCODING_PCM_16BIT) {
      throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
    }
    this.channelCount = channelCount;
    // Size endBuffer to hold exactly the frames that must be withheld for end trimming.
    endBuffer = new byte[trimEndSamples * channelCount * 2];
    endBufferSize = 0;
    pendingTrimStartBytes = trimStartSamples * channelCount * 2;
    boolean wasActive = isActive;
    isActive = trimStartSamples != 0 || trimEndSamples != 0;
    // Return whether the active state changed, per the AudioProcessor.configure contract used by
    // the other processors in this package.
    return wasActive != isActive;
  }

  @Override
  public boolean isActive() {
    return isActive;
  }

  @Override
  public int getOutputChannelCount() {
    // Trimming does not change the channel count.
    return channelCount;
  }

  @Override
  public int getOutputEncoding() {
    // Input is validated to be 16-bit PCM in configure, and is passed through unmodified.
    return C.ENCODING_PCM_16BIT;
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    int position = inputBuffer.position();
    int limit = inputBuffer.limit();
    int remaining = limit - position;

    // Trim any pending start bytes from the input buffer.
    int trimBytes = Math.min(remaining, pendingTrimStartBytes);
    pendingTrimStartBytes -= trimBytes;
    inputBuffer.position(position + trimBytes);
    if (pendingTrimStartBytes > 0) {
      // Nothing to output yet.
      return;
    }
    remaining -= trimBytes;

    // endBuffer must be kept as full as possible, so that we trim the right amount of media if we
    // don't receive any more input. After taking into account the number of bytes needed to keep
    // endBuffer as full as possible, the output should be any surplus bytes currently in endBuffer
    // followed by any surplus bytes in the new inputBuffer.
    int remainingBytesToOutput = endBufferSize + remaining - endBuffer.length;
    if (buffer.capacity() < remainingBytesToOutput) {
      buffer = ByteBuffer.allocateDirect(remainingBytesToOutput).order(ByteOrder.nativeOrder());
    } else {
      buffer.clear();
    }

    // Output from endBuffer. constrainValue clamps to [0, endBufferSize] so a negative surplus
    // (input smaller than the space left in endBuffer) outputs nothing.
    int endBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, endBufferSize);
    buffer.put(endBuffer, 0, endBufferBytesToOutput);
    remainingBytesToOutput -= endBufferBytesToOutput;
    // Output from inputBuffer, restoring its limit afterwards.
    int inputBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, remaining);
    inputBuffer.limit(inputBuffer.position() + inputBufferBytesToOutput);
    buffer.put(inputBuffer);
    inputBuffer.limit(limit);
    remaining -= inputBufferBytesToOutput;

    // Compact endBuffer, then repopulate it using the new input.
    endBufferSize -= endBufferBytesToOutput;
    System.arraycopy(endBuffer, endBufferBytesToOutput, endBuffer, 0, endBufferSize);
    inputBuffer.get(endBuffer, endBufferSize, remaining);
    endBufferSize += remaining;

    buffer.flip();
    outputBuffer = buffer;
  }

  @Override
  public void queueEndOfStream() {
    inputEnded = true;
  }

  @Override
  public ByteBuffer getOutput() {
    // Hand out the pending output exactly once, then reset to the sentinel empty buffer.
    ByteBuffer outputBuffer = this.outputBuffer;
    this.outputBuffer = EMPTY_BUFFER;
    return outputBuffer;
  }

  @SuppressWarnings("ReferenceEquality")
  @Override
  public boolean isEnded() {
    // Reference comparison against the EMPTY_BUFFER sentinel is intentional here.
    return inputEnded && outputBuffer == EMPTY_BUFFER;
  }

  @Override
  public void flush() {
    outputBuffer = EMPTY_BUFFER;
    inputEnded = false;
    // It's no longer necessary to trim any media from the start, but it is necessary to clear the
    // end buffer and refill it.
    pendingTrimStartBytes = 0;
    endBufferSize = 0;
  }

  @Override
  public void reset() {
    flush();
    // Release buffer references; configure must be called again before queueing more input.
    buffer = EMPTY_BUFFER;
    channelCount = Format.NO_VALUE;
    endBuffer = null;
  }

}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment