Commit 3cdc8a9e by andrewlewis Committed by Ian Baker

Use correct last timestamp for C2 MP3 workaround

The C2 MP3 decoder produces an extra output buffer when draining after
end-of-stream is queued. This output buffer has a later timestamp than the last
queued input buffer, so we need to calculate its timestamp to detect a stream
change in the correct position.

Before this CL we used the original input buffer timestamp as the largest
queued timestamp, which caused the stream change to be detected at the correct
position because the original input buffer timestamp was slightly larger than
the actual last output buffer timestamp. After this change we use the exact
calculated timestamp as the largest queued timestamp. I manually verified
gapless continues to work on a device using the C2 MP3 decoder by comparing
output of the MP3 gapless and MP3 gapless stripped playlists in the demo app,
and that the last buffer timestamp now matches.

#exofixit

PiperOrigin-RevId: 395428928
parent 442a5f45
...@@ -15,6 +15,8 @@ ...@@ -15,6 +15,8 @@
*/ */
package com.google.android.exoplayer2.mediacodec; package com.google.android.exoplayer2.mediacodec;
import static java.lang.Math.max;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.MpegAudioUtil; import com.google.android.exoplayer2.audio.MpegAudioUtil;
...@@ -29,13 +31,11 @@ import java.nio.ByteBuffer; ...@@ -29,13 +31,11 @@ import java.nio.ByteBuffer;
*/ */
/* package */ final class C2Mp3TimestampTracker { /* package */ final class C2Mp3TimestampTracker {
// Mirroring the actual codec, as can be found at private static final long DECODER_DELAY_FRAMES = 529;
// https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.h;l=55;drc=3665390c9d32a917398b240c5a46ced07a3b65eb
private static final long DECODER_DELAY_SAMPLES = 529;
private static final String TAG = "C2Mp3TimestampTracker"; private static final String TAG = "C2Mp3TimestampTracker";
private long processedSamples;
private long anchorTimestampUs; private long anchorTimestampUs;
private long processedFrames;
private boolean seenInvalidMpegAudioHeader; private boolean seenInvalidMpegAudioHeader;
/** /**
...@@ -44,8 +44,8 @@ import java.nio.ByteBuffer; ...@@ -44,8 +44,8 @@ import java.nio.ByteBuffer;
* <p>This should be done when the codec is flushed. * <p>This should be done when the codec is flushed.
*/ */
public void reset() { public void reset() {
processedSamples = 0;
anchorTimestampUs = 0; anchorTimestampUs = 0;
processedFrames = 0;
seenInvalidMpegAudioHeader = false; seenInvalidMpegAudioHeader = false;
} }
...@@ -57,6 +57,10 @@ import java.nio.ByteBuffer; ...@@ -57,6 +57,10 @@ import java.nio.ByteBuffer;
* @return The expected output presentation time, in microseconds. * @return The expected output presentation time, in microseconds.
*/ */
public long updateAndGetPresentationTimeUs(Format format, DecoderInputBuffer buffer) { public long updateAndGetPresentationTimeUs(Format format, DecoderInputBuffer buffer) {
if (processedFrames == 0) {
anchorTimestampUs = buffer.timeUs;
}
if (seenInvalidMpegAudioHeader) { if (seenInvalidMpegAudioHeader) {
return buffer.timeUs; return buffer.timeUs;
} }
...@@ -71,23 +75,32 @@ import java.nio.ByteBuffer; ...@@ -71,23 +75,32 @@ import java.nio.ByteBuffer;
int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(sampleHeaderData); int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(sampleHeaderData);
if (frameCount == C.LENGTH_UNSET) { if (frameCount == C.LENGTH_UNSET) {
seenInvalidMpegAudioHeader = true; seenInvalidMpegAudioHeader = true;
processedFrames = 0;
anchorTimestampUs = buffer.timeUs;
Log.w(TAG, "MPEG audio header is invalid."); Log.w(TAG, "MPEG audio header is invalid.");
return buffer.timeUs; return buffer.timeUs;
} }
long currentBufferTimestampUs = getBufferTimestampUs(format.sampleRate);
processedFrames += frameCount;
return currentBufferTimestampUs;
}
// These calculations mirror the timestamp calculations in the Codec2 Mp3 Decoder. /**
// https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.cpp;l=464;drc=ed134640332fea70ca4b05694289d91a5265bb46 * Returns the timestamp of the last buffer that will be produced if the stream ends at the
if (processedSamples == 0) { * current position, in microseconds.
anchorTimestampUs = buffer.timeUs; *
processedSamples = frameCount - DECODER_DELAY_SAMPLES; * @param format The format associated with input buffers.
return anchorTimestampUs; * @return The timestamp of the last buffer that will be produced if the stream ends at the
} * current position, in microseconds.
long processedDurationUs = getProcessedDurationUs(format); */
processedSamples += frameCount; public long getLastOutputBufferPresentationTimeUs(Format format) {
return anchorTimestampUs + processedDurationUs; return getBufferTimestampUs(format.sampleRate);
} }
private long getProcessedDurationUs(Format format) { private long getBufferTimestampUs(long sampleRate) {
return processedSamples * C.MICROS_PER_SECOND / format.sampleRate; // This calculation matches the timestamp calculation in the Codec2 Mp3 Decoder.
// https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.cpp;l=464;drc=ed134640332fea70ca4b05694289d91a5265bb46
return anchorTimestampUs
+ max(0, (processedFrames - DECODER_DELAY_FRAMES) * C.MICROS_PER_SECOND / sampleRate);
} }
} }
...@@ -1333,6 +1333,14 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1333,6 +1333,14 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
if (c2Mp3TimestampTracker != null) { if (c2Mp3TimestampTracker != null) {
presentationTimeUs = presentationTimeUs =
c2Mp3TimestampTracker.updateAndGetPresentationTimeUs(inputFormat, buffer); c2Mp3TimestampTracker.updateAndGetPresentationTimeUs(inputFormat, buffer);
// When draining the C2 MP3 decoder it produces an extra non-empty buffer with a timestamp
// after all queued input buffer timestamps (unlike other decoders, which generally propagate
// the input timestamps to output buffers 1:1). To detect the end of the stream when this
// buffer is dequeued we override the largest queued timestamp accordingly.
largestQueuedPresentationTimeUs =
max(
largestQueuedPresentationTimeUs,
c2Mp3TimestampTracker.getLastOutputBufferPresentationTimeUs(inputFormat));
} }
if (buffer.isDecodeOnly()) { if (buffer.isDecodeOnly()) {
...@@ -1342,14 +1350,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1342,14 +1350,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
formatQueue.add(presentationTimeUs, inputFormat); formatQueue.add(presentationTimeUs, inputFormat);
waitingForFirstSampleInFormat = false; waitingForFirstSampleInFormat = false;
} }
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs);
// TODO(b/158483277): Find the root cause of why a gap is introduced in MP3 playback when using
// presentationTimeUs from the c2Mp3TimestampTracker.
if (c2Mp3TimestampTracker != null) {
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, buffer.timeUs);
} else {
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs);
}
buffer.flip(); buffer.flip();
if (buffer.hasSupplementalData()) { if (buffer.hasSupplementalData()) {
handleInputBufferSupplementalData(buffer); handleInputBufferSupplementalData(buffer);
......
...@@ -15,6 +15,7 @@ ...@@ -15,6 +15,7 @@
*/ */
package com.google.android.exoplayer2.mediacodec; package com.google.android.exoplayer2.mediacodec;
import static com.google.android.exoplayer2.testutil.TestUtil.createByteArray;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
...@@ -22,6 +23,8 @@ import com.google.android.exoplayer2.Format; ...@@ -22,6 +23,8 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
...@@ -30,49 +33,68 @@ import org.junit.runner.RunWith; ...@@ -30,49 +33,68 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class C2Mp3TimestampTrackerTest { public final class C2Mp3TimestampTrackerTest {
private static final Format AUDIO_MP3 = private static final Format FORMAT =
new Format.Builder() new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_MPEG) .setSampleMimeType(MimeTypes.AUDIO_MPEG)
.setChannelCount(2) .setChannelCount(2)
.setSampleRate(44_100) .setSampleRate(44_100)
.build(); .build();
private DecoderInputBuffer buffer;
private C2Mp3TimestampTracker timestampTracker; private C2Mp3TimestampTracker timestampTracker;
private DecoderInputBuffer buffer;
private DecoderInputBuffer invalidBuffer;
@Before @Before
public void setUp() { public void setUp() {
buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
timestampTracker = new C2Mp3TimestampTracker(); timestampTracker = new C2Mp3TimestampTracker();
buffer.data = ByteBuffer.wrap(new byte[] {-1, -5, -24, 60}); buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
buffer.data = ByteBuffer.wrap(createByteArray(0xFF, 0xFB, 0xE8, 0x3C));
buffer.timeUs = 100_000; buffer.timeUs = 100_000;
invalidBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
invalidBuffer.data = ByteBuffer.wrap(createByteArray(0, 0, 0, 0));
invalidBuffer.timeUs = 120_000;
} }
@Test @Test
public void whenUpdateCalledMultipleTimes_timestampsIncrease() { public void handleBuffers_outputsCorrectTimestamps() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); List<Long> presentationTimesUs = new ArrayList<>();
long second = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
long third = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(second).isGreaterThan(first); assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 140_249L, 166_371L);
assertThat(third).isGreaterThan(second);
} }
@Test @Test
public void whenResetCalled_timestampsDecrease() { public void handleBuffersWithReset_resetsTimestamps() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); List<Long> presentationTimesUs = new ArrayList<>();
long second = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
timestampTracker.reset(); timestampTracker.reset();
long third = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(second).isGreaterThan(first); assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 100_000L, 114_126L);
assertThat(third).isLessThan(second);
} }
@Test @Test
public void whenBufferTimeIsNotZero_firstSampleIsOffset() { public void handleInvalidBuffer_stopsUpdatingTimestamps() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer); List<Long> presentationTimesUs = new ArrayList<>();
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, invalidBuffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(first).isEqualTo(buffer.timeUs); assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 120_000L, 120_000L);
}
@Test
public void firstTimestamp_matchesBuffer() {
assertThat(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer))
.isEqualTo(buffer.timeUs);
timestampTracker.reset();
assertThat(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, invalidBuffer))
.isEqualTo(invalidBuffer.timeUs);
} }
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment