Commit 71fd335b by olly Committed by kim-vde

Simplify output format propagation

PiperOrigin-RevId: 324805335
parent 4d03d308
......@@ -2188,11 +2188,9 @@ public class SimpleExoPlayer extends BasePlayer
}
@Override
public void onVideoFrameProcessingOffset(
long totalProcessingOffsetUs, int frameCount, Format format) {
public void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoFrameProcessingOffset(
totalProcessingOffsetUs, frameCount, format);
videoDebugListener.onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount);
}
}
......
......@@ -319,11 +319,10 @@ public class AnalyticsCollector
}
@Override
public final void onVideoFrameProcessingOffset(
long totalProcessingOffsetUs, int frameCount, Format format) {
public final void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) {
listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount, format);
listener.onVideoFrameProcessingOffset(eventTime, totalProcessingOffsetUs, frameCount);
}
}
......
......@@ -591,10 +591,9 @@ public interface AnalyticsListener {
* @param totalProcessingOffsetUs The sum of the video frame processing offsets for frames
* rendered since the last call to this method.
* @param frameCount The number of samples included in {@code totalProcessingOffsetUs}.
* @param format The video {@link Format} being rendered.
*/
default void onVideoFrameProcessingOffset(
EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {}
EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {}
/**
* Called when a frame is rendered for the first time since setting the surface, or since the
......
......@@ -91,7 +91,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private boolean codecNeedsDiscardChannelsWorkaround;
private boolean codecNeedsEosBufferTimestampWorkaround;
@Nullable private Format codecPassthroughFormat;
@Nullable private Format inputFormat;
private long currentPositionUs;
private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
......@@ -379,29 +378,23 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
super.onInputFormatChanged(formatHolder);
inputFormat = formatHolder.format;
eventDispatcher.inputFormatChanged(inputFormat);
eventDispatcher.inputFormatChanged(formatHolder.format);
}
@Override
protected void onOutputFormatChanged(Format outputFormat) throws ExoPlaybackException {
configureOutput(outputFormat);
}
@Override
protected void configureOutput(Format outputFormat) throws ExoPlaybackException {
protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
throws ExoPlaybackException {
Format audioSinkInputFormat;
@Nullable int[] channelMap = null;
if (codecPassthroughFormat != null) { // Raw codec passthrough
audioSinkInputFormat = codecPassthroughFormat;
} else if (getCodec() == null) { // Codec bypass passthrough
audioSinkInputFormat = outputFormat;
audioSinkInputFormat = format;
} else {
MediaFormat mediaFormat = getCodec().getOutputFormat();
@C.PcmEncoding int pcmEncoding;
if (MimeTypes.AUDIO_RAW.equals(outputFormat.sampleMimeType)) {
if (MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)) {
// For PCM streams, the encoder passes through int samples despite set to float mode.
pcmEncoding = outputFormat.pcmEncoding;
pcmEncoding = format.pcmEncoding;
} else if (Util.SDK_INT >= 24 && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
pcmEncoding = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
} else if (mediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) {
......@@ -409,22 +402,25 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} else {
// If the format is anything other than PCM then we assume that the audio decoder will
// output 16-bit PCM.
pcmEncoding = C.ENCODING_PCM_16BIT;
pcmEncoding =
MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)
? format.pcmEncoding
: C.ENCODING_PCM_16BIT;
}
audioSinkInputFormat =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_RAW)
.setPcmEncoding(pcmEncoding)
.setEncoderDelay(outputFormat.encoderDelay)
.setEncoderPadding(outputFormat.encoderPadding)
.setEncoderDelay(format.encoderDelay)
.setEncoderPadding(format.encoderPadding)
.setChannelCount(mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT))
.setSampleRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE))
.build();
if (codecNeedsDiscardChannelsWorkaround
&& audioSinkInputFormat.channelCount == 6
&& outputFormat.channelCount < 6) {
channelMap = new int[outputFormat.channelCount];
for (int i = 0; i < outputFormat.channelCount; i++) {
&& format.channelCount < 6) {
channelMap = new int[format.channelCount];
for (int i = 0; i < format.channelCount; i++) {
channelMap[i] = i;
}
}
......@@ -432,7 +428,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
try {
audioSink.configure(audioSinkInputFormat, /* specifiedBufferSize= */ 0, channelMap);
} catch (AudioSink.ConfigurationException e) {
throw createRendererException(e, outputFormat);
throw createRendererException(e, format);
}
}
......@@ -621,8 +617,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
try {
audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) {
Format outputFormat = getCurrentOutputFormat();
throw createRendererException(e, outputFormat != null ? outputFormat : inputFormat);
@Nullable Format outputFormat = getOutputFormat();
throw createRendererException(e, outputFormat != null ? outputFormat : getInputFormat());
}
}
......
......@@ -153,9 +153,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private long totalVideoFrameProcessingOffsetUs;
private int videoFrameProcessingOffsetCount;
@Nullable private MediaFormat currentMediaFormat;
private int mediaFormatWidth;
private int mediaFormatHeight;
private int currentWidth;
private int currentHeight;
private int currentUnappliedRotationDegrees;
......@@ -262,8 +259,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
currentHeight = Format.NO_VALUE;
currentPixelWidthHeightRatio = Format.NO_VALUE;
scalingMode = VIDEO_SCALING_MODE_DEFAULT;
mediaFormatWidth = Format.NO_VALUE;
mediaFormatHeight = Format.NO_VALUE;
clearReportedVideoSize();
}
......@@ -449,7 +444,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
protected void onDisabled() {
currentMediaFormat = null;
clearReportedVideoSize();
clearRenderedFirstFrame();
frameReleaseTimeHelper.disable();
......@@ -668,51 +662,37 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
@Override
protected void onOutputMediaFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) {
currentMediaFormat = outputMediaFormat;
boolean hasCrop =
outputMediaFormat.containsKey(KEY_CROP_RIGHT)
&& outputMediaFormat.containsKey(KEY_CROP_LEFT)
&& outputMediaFormat.containsKey(KEY_CROP_BOTTOM)
&& outputMediaFormat.containsKey(KEY_CROP_TOP);
mediaFormatWidth =
hasCrop
? outputMediaFormat.getInteger(KEY_CROP_RIGHT)
- outputMediaFormat.getInteger(KEY_CROP_LEFT)
+ 1
: outputMediaFormat.getInteger(MediaFormat.KEY_WIDTH);
mediaFormatHeight =
hasCrop
? outputMediaFormat.getInteger(KEY_CROP_BOTTOM)
- outputMediaFormat.getInteger(KEY_CROP_TOP)
+ 1
: outputMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
// Must be applied each time the output MediaFormat changes.
codec.setVideoScalingMode(scalingMode);
maybeNotifyVideoFrameProcessingOffset();
}
@Override
protected void onOutputFormatChanged(Format outputFormat) {
configureOutput(outputFormat);
}
@Override
protected void configureOutput(Format outputFormat) {
protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) {
@Nullable MediaCodec codec = getCodec();
if (codec != null) {
// Must be applied each time the output format changes.
codec.setVideoScalingMode(scalingMode);
}
if (tunneling) {
currentWidth = outputFormat.width;
currentHeight = outputFormat.height;
currentWidth = format.width;
currentHeight = format.height;
} else {
currentWidth = mediaFormatWidth;
currentHeight = mediaFormatHeight;
}
currentPixelWidthHeightRatio = outputFormat.pixelWidthHeightRatio;
Assertions.checkNotNull(mediaFormat);
boolean hasCrop =
mediaFormat.containsKey(KEY_CROP_RIGHT)
&& mediaFormat.containsKey(KEY_CROP_LEFT)
&& mediaFormat.containsKey(KEY_CROP_BOTTOM)
&& mediaFormat.containsKey(KEY_CROP_TOP);
currentWidth =
hasCrop
? mediaFormat.getInteger(KEY_CROP_RIGHT) - mediaFormat.getInteger(KEY_CROP_LEFT) + 1
: mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
currentHeight =
hasCrop
? mediaFormat.getInteger(KEY_CROP_BOTTOM) - mediaFormat.getInteger(KEY_CROP_TOP) + 1
: mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
}
currentPixelWidthHeightRatio = format.pixelWidthHeightRatio;
if (Util.SDK_INT >= 21) {
// On API level 21 and above the decoder applies the rotation when rendering to the surface.
// Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
// to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
if (outputFormat.rotationDegrees == 90 || outputFormat.rotationDegrees == 270) {
if (format.rotationDegrees == 90 || format.rotationDegrees == 270) {
int rotatedHeight = currentWidth;
currentWidth = currentHeight;
currentHeight = rotatedHeight;
......@@ -720,9 +700,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
} else {
// On API level 20 and below the decoder does not apply the rotation.
currentUnappliedRotationDegrees = outputFormat.rotationDegrees;
currentUnappliedRotationDegrees = format.rotationDegrees;
}
currentFrameRate = outputFormat.frameRate;
currentFrameRate = format.frameRate;
updateSurfaceFrameRate(/* isNewSurface= */ false);
}
......@@ -811,7 +791,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|| (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
if (forceRenderOutputBuffer) {
long releaseTimeNs = System.nanoTime();
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format, currentMediaFormat);
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
if (Util.SDK_INT >= 21) {
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
} else {
......@@ -857,8 +837,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
if (Util.SDK_INT >= 21) {
// Let the underlying framework time the release.
if (earlyUs < 50000) {
notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
......@@ -877,8 +856,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return false;
}
}
notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
updateVideoFrameProcessingOffsetCounters(earlyUs);
return true;
......@@ -890,10 +868,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
private void notifyFrameMetadataListener(
long presentationTimeUs, long releaseTimeNs, Format format, MediaFormat mediaFormat) {
long presentationTimeUs, long releaseTimeNs, Format format) {
if (frameMetadataListener != null) {
frameMetadataListener.onVideoFrameAboutToBeRendered(
presentationTimeUs, releaseTimeNs, format, mediaFormat);
presentationTimeUs, releaseTimeNs, format, getCodecOutputMediaFormat());
}
}
......@@ -1230,10 +1208,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
private void maybeNotifyVideoFrameProcessingOffset() {
@Nullable Format outputFormat = getCurrentOutputFormat();
if (outputFormat != null && videoFrameProcessingOffsetCount != 0) {
if (videoFrameProcessingOffsetCount != 0) {
eventDispatcher.reportVideoFrameProcessingOffset(
totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount, outputFormat);
totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount);
totalVideoFrameProcessingOffsetUs = 0;
videoFrameProcessingOffsetCount = 0;
}
......
......@@ -88,10 +88,8 @@ public interface VideoRendererEventListener {
* @param totalProcessingOffsetUs The sum of all video frame processing offset samples for the
* video frames processed by the renderer in microseconds.
* @param frameCount The number of samples included in {@code totalProcessingOffsetUs}.
* @param format The {@link Format} that is currently output.
*/
default void onVideoFrameProcessingOffset(
long totalProcessingOffsetUs, int frameCount, Format format) {}
default void onVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {}
/**
* Called before a frame is rendered for the first time since setting the surface, and each time
......@@ -182,13 +180,12 @@ public interface VideoRendererEventListener {
}
/** Invokes {@link VideoRendererEventListener#onVideoFrameProcessingOffset}. */
public void reportVideoFrameProcessingOffset(
long totalProcessingOffsetUs, int frameCount, Format format) {
public void reportVideoFrameProcessingOffset(long totalProcessingOffsetUs, int frameCount) {
if (handler != null) {
handler.post(
() ->
castNonNull(listener)
.onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount, format));
.onVideoFrameProcessingOffset(totalProcessingOffsetUs, frameCount));
}
}
......
......@@ -1955,7 +1955,7 @@ public final class AnalyticsCollectorTest {
@Override
public void onVideoFrameProcessingOffset(
EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {
EventTime eventTime, long totalProcessingOffsetUs, int frameCount) {
reportedEvents.add(new ReportedEvent(EVENT_VIDEO_FRAME_PROCESSING_OFFSET, eventTime));
}
......
......@@ -25,7 +25,9 @@ import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.media.MediaFormat;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
......@@ -216,15 +218,16 @@ public class MediaCodecAudioRendererTest {
/* eventHandler= */ null,
/* eventListener= */ null) {
@Override
protected void onOutputFormatChanged(Format outputFormat) throws ExoPlaybackException {
super.onOutputFormatChanged(outputFormat);
if (!outputFormat.equals(AUDIO_AAC)) {
protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat)
throws ExoPlaybackException {
super.onOutputFormatChanged(format, mediaFormat);
if (!format.equals(AUDIO_AAC)) {
setPendingPlaybackException(
ExoPlaybackException.createForRenderer(
new AudioSink.ConfigurationException("Test"),
"rendererName",
/* rendererIndex= */ 0,
outputFormat,
format,
FORMAT_HANDLED));
}
}
......@@ -254,8 +257,11 @@ public class MediaCodecAudioRendererTest {
exceptionThrowingRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
exceptionThrowingRenderer.render(/* positionUs= */ 250, SystemClock.elapsedRealtime() * 1000);
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 32_000);
// Simulating the exception being thrown when not traceable back to render.
exceptionThrowingRenderer.onOutputFormatChanged(changedFormat);
exceptionThrowingRenderer.onOutputFormatChanged(changedFormat, mediaFormat);
assertThrows(
ExoPlaybackException.class,
......
......@@ -29,6 +29,7 @@ import static org.mockito.Mockito.verify;
import static org.robolectric.Shadows.shadowOf;
import android.graphics.SurfaceTexture;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
......@@ -37,7 +38,6 @@ import androidx.annotation.Nullable;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RendererCapabilities;
......@@ -113,9 +113,9 @@ public class MediaCodecVideoRendererTest {
}
@Override
protected void onOutputFormatChanged(Format outputFormat) {
super.onOutputFormatChanged(outputFormat);
currentOutputFormat = outputFormat;
protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) {
super.onOutputFormatChanged(format, mediaFormat);
currentOutputFormat = format;
}
};
......@@ -458,59 +458,4 @@ public class MediaCodecVideoRendererTest {
shadowLooper.idle();
verify(eventListener, times(2)).onRenderedFirstFrame(any());
}
@Test
public void onVideoFrameProcessingOffset_isCalledAfterOutputFormatChanges()
throws ExoPlaybackException {
Format mp4Uhd = VIDEO_H264.buildUpon().setWidth(3840).setHeight(2160).build();
FakeSampleStream fakeSampleStream =
new FakeSampleStream(
/* mediaSourceEventDispatcher= */ null,
DrmSessionManager.DUMMY,
new DrmSessionEventListener.EventDispatcher(),
/* initialFormat= */ mp4Uhd,
ImmutableList.of(
oneByteSample(/* timeUs= */ 0, C.BUFFER_FLAG_KEY_FRAME),
format(VIDEO_H264),
oneByteSample(/* timeUs= */ 50, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 100),
format(mp4Uhd),
oneByteSample(/* timeUs= */ 150, C.BUFFER_FLAG_KEY_FRAME),
oneByteSample(/* timeUs= */ 200),
oneByteSample(/* timeUs= */ 250),
format(VIDEO_H264),
oneByteSample(/* timeUs= */ 300, C.BUFFER_FLAG_KEY_FRAME),
FakeSampleStreamItem.END_OF_STREAM_ITEM));
mediaCodecVideoRenderer.enable(
RendererConfiguration.DEFAULT,
new Format[] {mp4Uhd},
fakeSampleStream,
/* positionUs= */ 0,
/* joining= */ false,
/* mayRenderStartOfStream= */ true,
/* offsetUs */ 0);
mediaCodecVideoRenderer.setCurrentStreamFinal();
mediaCodecVideoRenderer.start();
int positionUs = 10;
do {
mediaCodecVideoRenderer.render(positionUs, SystemClock.elapsedRealtime() * 1000);
positionUs += 10;
} while (!mediaCodecVideoRenderer.isEnded());
mediaCodecVideoRenderer.stop();
shadowOf(testMainLooper).idle();
InOrder orderVerifier = inOrder(eventListener);
orderVerifier.verify(eventListener).onVideoFrameProcessingOffset(anyLong(), eq(1), eq(mp4Uhd));
orderVerifier
.verify(eventListener)
.onVideoFrameProcessingOffset(anyLong(), eq(2), eq(VIDEO_H264));
orderVerifier.verify(eventListener).onVideoFrameProcessingOffset(anyLong(), eq(3), eq(mp4Uhd));
orderVerifier
.verify(eventListener)
.onVideoFrameProcessingOffset(anyLong(), eq(1), eq(VIDEO_H264));
orderVerifier.verifyNoMoreInteractions();
}
}
......@@ -67,9 +67,7 @@ public class FakeVideoRenderer extends FakeRenderer {
super.onStopped();
eventDispatcher.droppedFrames(/* droppedFrameCount= */ 0, /* elapsedMs= */ 0);
eventDispatcher.reportVideoFrameProcessingOffset(
/* totalProcessingOffsetUs= */ 400000,
/* frameCount= */ 10,
Assertions.checkNotNull(format));
/* totalProcessingOffsetUs= */ 400000, /* frameCount= */ 10);
}
@Override
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment