Commit 9f2d2005 by claincly Committed by Tofunmi Adigun-Hameed

Add a tracing utility for debugging video-related muxer timeouts

PiperOrigin-RevId: 529632886
parent a43ef109
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.effect;
import androidx.annotation.GuardedBy;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.SystemClock;
import com.google.android.exoplayer2.util.Util;
import com.google.common.base.Joiner;
import java.util.ArrayDeque;
import java.util.Queue;
/** A debugging tracing utility. */
public final class DebugTraceUtil {

  /** How many recent {@code MuxerWrapper#canWriteSample} results are retained. */
  private static final int MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT = 10;

  /** The timestamps at which the decoder received end of stream signal, in milliseconds. */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> DECODER_RECEIVE_EOS_TIMES_MS = new ArrayDeque<>();

  /** The timestamps at which the decoder signalled end of stream, in milliseconds. */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> DECODER_SIGNAL_EOS_TIMES_MS = new ArrayDeque<>();

  /**
   * The timestamps at which {@code VideoFrameProcessor} received end of stream signal from the
   * decoder, in milliseconds.
   */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS =
      new ArrayDeque<>();

  /**
   * The timestamps at which {@code VideoFrameProcessor} signalled end of stream, in milliseconds.
   */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS = new ArrayDeque<>();

  /** The timestamps at which the encoder received end of stream signal, in milliseconds. */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> ENCODER_RECEIVE_EOS_TIMES_MS = new ArrayDeque<>();

  /**
   * The last {@link #MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT} values returned by {@code
   * MuxerWrapper#canWriteSample}.
   */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Boolean> MUXER_CAN_WRITE_VIDEO_SAMPLE = new ArrayDeque<>();

  /** The timestamps at which the muxer video track is stopped, in milliseconds. */
  @GuardedBy("DebugTraceUtil.class")
  private static final Queue<Long> MUXER_TRACK_END_TIMES_MS = new ArrayDeque<>();

  // Per-stage frame counters, incremented as frames flow through the pipeline.

  /** The number of decoded frames. */
  @GuardedBy("DebugTraceUtil.class")
  private static int decodedFrameCount = 0;

  /** The number of frames made available on {@code VideoFrameProcessor}'s input surface. */
  @GuardedBy("DebugTraceUtil.class")
  private static int framesRenderedToVfpInputCount = 0;

  /** The number of frames rendered to {@code VideoFrameProcessor}'s output. */
  @GuardedBy("DebugTraceUtil.class")
  private static int framesRenderedToVfpOutputCount = 0;

  /** The number of encoded frames. */
  @GuardedBy("DebugTraceUtil.class")
  private static int encodedFrameCount = 0;

  /** The number of video frames written to the muxer. */
  @GuardedBy("DebugTraceUtil.class")
  private static int muxedFrameCount = 0;

  private DebugTraceUtil() {}

  /** Discards all recorded events and resets every counter to zero. */
  public static synchronized void reset() {
    DECODER_RECEIVE_EOS_TIMES_MS.clear();
    DECODER_SIGNAL_EOS_TIMES_MS.clear();
    VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS.clear();
    VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS.clear();
    ENCODER_RECEIVE_EOS_TIMES_MS.clear();
    MUXER_CAN_WRITE_VIDEO_SAMPLE.clear();
    MUXER_TRACK_END_TIMES_MS.clear();
    decodedFrameCount = 0;
    framesRenderedToVfpInputCount = 0;
    framesRenderedToVfpOutputCount = 0;
    encodedFrameCount = 0;
    muxedFrameCount = 0;
  }

  /** Records that the decoder produced one frame. */
  public static synchronized void recordDecodedFrame() {
    decodedFrameCount++;
  }

  /** Records that one frame became available on the {@code VideoFrameProcessor} input surface. */
  public static synchronized void recordFrameRenderedToVideoFrameProcessorInput() {
    framesRenderedToVfpInputCount++;
  }

  /** Records that one frame was rendered to the {@code VideoFrameProcessor} output. */
  public static synchronized void recordFrameRenderedToVideoFrameProcessorOutput() {
    framesRenderedToVfpOutputCount++;
  }

  /** Records that the encoder produced one frame. */
  public static synchronized void recordEncodedFrame() {
    encodedFrameCount++;
  }

  /** Records one sample written to the muxer; only video samples are counted. */
  public static synchronized void recordMuxerInput(@C.TrackType int trackType) {
    if (trackType == C.TRACK_TYPE_VIDEO) {
      muxedFrameCount++;
    }
  }

  /** Records the time at which the decoder received the end of stream signal. */
  public static synchronized void recordDecoderReceiveEos() {
    DECODER_RECEIVE_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
  }

  /** Records the time at which the decoder signalled end of stream. */
  public static synchronized void recordDecoderSignalEos() {
    DECODER_SIGNAL_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
  }

  /** Records the time at which {@code VideoFrameProcessor} received the decoder's end of stream. */
  public static synchronized void recordVideoFrameProcessorReceiveDecoderEos() {
    VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
  }

  /** Records the time at which {@code VideoFrameProcessor} signalled end of stream. */
  public static synchronized void recordVideoFrameProcessorSignalEos() {
    VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
  }

  /** Records the time at which the encoder received the end of stream signal. */
  public static synchronized void recordEncoderReceiveEos() {
    ENCODER_RECEIVE_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
  }

  /**
   * Records one {@code MuxerWrapper#canWriteSample} result for the video track, keeping only the
   * most recent {@link #MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT} values.
   */
  public static synchronized void recordMuxerCanAddSample(
      @C.TrackType int trackType, boolean canAddSample) {
    if (trackType != C.TRACK_TYPE_VIDEO) {
      return;
    }
    // Drop the oldest entry once the record is full, so the queue stays bounded.
    while (MUXER_CAN_WRITE_VIDEO_SAMPLE.size() >= MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT) {
      MUXER_CAN_WRITE_VIDEO_SAMPLE.poll();
    }
    MUXER_CAN_WRITE_VIDEO_SAMPLE.add(canAddSample);
  }

  /** Records the time at which the muxer video track was stopped. */
  public static synchronized void recordMuxerTrackEnded(@C.TrackType int trackType) {
    if (trackType == C.TRACK_TYPE_VIDEO) {
      MUXER_TRACK_END_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
    }
  }

  /** Returns a single-line summary of everything recorded since the last {@link #reset()}. */
  public static synchronized String generateTrace() {
    StringBuilder trace = new StringBuilder();
    trace
        .append("Decoded: ")
        .append(decodedFrameCount)
        .append(", Rendered to VFP: ")
        .append(framesRenderedToVfpInputCount)
        .append(", Rendered to encoder: ")
        .append(framesRenderedToVfpOutputCount)
        .append(", Encoded: ")
        .append(encodedFrameCount)
        .append(", Muxed: ")
        .append(muxedFrameCount)
        .append(", Decoder receive EOS: ")
        .append(generateString(DECODER_RECEIVE_EOS_TIMES_MS))
        .append(", Decoder signal EOS: ")
        .append(generateString(DECODER_SIGNAL_EOS_TIMES_MS))
        .append(", VFP receive EOS: ")
        .append(generateString(VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS))
        .append(", VFP signal EOS: ")
        .append(generateString(VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS))
        .append(", Encoder receive EOS: ")
        .append(generateString(ENCODER_RECEIVE_EOS_TIMES_MS))
        .append(
            Util.formatInvariant(
                ", Muxer last %d video canWriteSample: ", MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT))
        .append(generateString(MUXER_CAN_WRITE_VIDEO_SAMPLE))
        .append(", Muxer stopped: ")
        .append(generateString(MUXER_TRACK_END_TIMES_MS));
    return trace.toString();
  }

  /** Renders a queue as "[a,b,c]", or "NO" when nothing was recorded. */
  private static <T> String generateString(Queue<T> queue) {
    if (queue.isEmpty()) {
      return "NO";
    }
    return Util.formatInvariant("[%s]", Joiner.on(',').join(queue));
  }
}
...@@ -417,6 +417,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -417,6 +417,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override @Override
public void signalEndOfInput() { public void signalEndOfInput() {
DebugTraceUtil.recordVideoFrameProcessorReceiveDecoderEos();
checkState(!inputStreamEnded); checkState(!inputStreamEnded);
inputStreamEnded = true; inputStreamEnded = true;
boolean allInputStreamsProcessed; boolean allInputStreamsProcessed;
......
...@@ -97,6 +97,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -97,6 +97,7 @@ import java.util.concurrent.atomic.AtomicInteger;
unused -> unused ->
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
DebugTraceUtil.recordFrameRenderedToVideoFrameProcessorInput();
if (numberOfFramesToDropOnBecomingAvailable > 0) { if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--; numberOfFramesToDropOnBecomingAvailable--;
surfaceTexture.updateTexImage(); surfaceTexture.updateTexImage();
......
...@@ -181,6 +181,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -181,6 +181,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
boolean frameProcessingEnded = boolean frameProcessingEnded =
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed(); checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
if (frameProcessingEnded) { if (frameProcessingEnded) {
DebugTraceUtil.recordVideoFrameProcessorSignalEos();
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded); videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
} }
} }
...@@ -325,6 +326,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -325,6 +326,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
? System.nanoTime() ? System.nanoTime()
: renderTimeNs); : renderTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface); EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
DebugTraceUtil.recordFrameRenderedToVideoFrameProcessorOutput();
} }
private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs) private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs)
......
...@@ -35,6 +35,7 @@ import androidx.annotation.VisibleForTesting; ...@@ -35,6 +35,7 @@ import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaFormatUtil; import com.google.android.exoplayer2.util.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
...@@ -212,6 +213,9 @@ public final class DefaultCodec implements Codec { ...@@ -212,6 +213,9 @@ public final class DefaultCodec implements Codec {
if (inputBuffer.isEndOfStream()) { if (inputBuffer.isEndOfStream()) {
inputStreamEnded = true; inputStreamEnded = true;
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM; flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
if (isVideo && isDecoder) {
DebugTraceUtil.recordDecoderReceiveEos();
}
} }
try { try {
mediaCodec.queueInputBuffer(inputBufferIndex, offset, size, inputBuffer.timeUs, flags); mediaCodec.queueInputBuffer(inputBufferIndex, offset, size, inputBuffer.timeUs, flags);
...@@ -225,6 +229,7 @@ public final class DefaultCodec implements Codec { ...@@ -225,6 +229,7 @@ public final class DefaultCodec implements Codec {
@Override @Override
public void signalEndOfInputStream() throws ExportException { public void signalEndOfInputStream() throws ExportException {
DebugTraceUtil.recordEncoderReceiveEos();
try { try {
mediaCodec.signalEndOfInputStream(); mediaCodec.signalEndOfInputStream();
} catch (RuntimeException e) { } catch (RuntimeException e) {
......
...@@ -23,6 +23,7 @@ import androidx.annotation.Nullable; ...@@ -23,6 +23,7 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.ArrayList; import java.util.ArrayList;
...@@ -127,6 +128,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -127,6 +128,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@RequiresNonNull({"sampleConsumer", "decoder"}) @RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException { protected boolean feedConsumerFromDecoder() throws ExportException {
if (decoder.isEnded()) { if (decoder.isEnded()) {
DebugTraceUtil.recordDecoderSignalEos();
sampleConsumer.signalEndOfVideoInput(); sampleConsumer.signalEndOfVideoInput();
isEnded = true; isEnded = true;
return false; return false;
...@@ -154,6 +156,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -154,6 +156,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
decoder.releaseOutputBuffer(presentationTimeUs); decoder.releaseOutputBuffer(presentationTimeUs);
DebugTraceUtil.recordDecodedFrame();
return true; return true;
} }
......
...@@ -22,11 +22,13 @@ import static com.google.android.exoplayer2.util.Assertions.checkState; ...@@ -22,11 +22,13 @@ import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.Math.max; import static java.lang.Math.max;
import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.util.Log;
import android.util.SparseArray; import android.util.SparseArray;
import androidx.annotation.IntRange; import androidx.annotation.IntRange;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
...@@ -47,6 +49,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -47,6 +49,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
*/ */
/* package */ final class MuxerWrapper { /* package */ final class MuxerWrapper {
private static final String MUXER_TIMEOUT_ERROR_FORMAT_STRING =
"Abort: no output sample written in the last %d milliseconds. DebugTrace: %s";
public interface Listener { public interface Listener {
void onTrackEnded( void onTrackEnded(
@C.TrackType int trackType, Format format, int averageBitrate, int sampleCount); @C.TrackType int trackType, Format format, int averageBitrate, int sampleCount);
...@@ -191,8 +196,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -191,8 +196,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
// SparseArray.get() returns null by default if the value is not found. // SparseArray.get() returns null by default if the value is not found.
checkArgument( checkArgument(
trackInfo != null, "Could not write sample because there is no track of type " + trackType); trackInfo != null, "Could not write sample because there is no track of type " + trackType);
boolean canWriteSample = canWriteSample(trackType, presentationTimeUs);
if (!canWriteSample(trackType, presentationTimeUs)) { DebugTraceUtil.recordMuxerCanAddSample(trackType, canWriteSample);
if (!canWriteSample) {
return false; return false;
} }
...@@ -204,6 +210,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -204,6 +210,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
resetAbortTimer(); resetAbortTimer();
muxer.writeSampleData( muxer.writeSampleData(
trackInfo.index, data, presentationTimeUs, isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0); trackInfo.index, data, presentationTimeUs, isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0);
DebugTraceUtil.recordMuxerInput(trackType);
previousTrackType = trackType; previousTrackType = trackType;
return true; return true;
} }
...@@ -225,12 +232,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -225,12 +232,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
listener.onTrackEnded( listener.onTrackEnded(
trackType, trackInfo.format, trackInfo.getAverageBitrate(), trackInfo.sampleCount); trackType, trackInfo.format, trackInfo.getAverageBitrate(), trackInfo.sampleCount);
DebugTraceUtil.recordMuxerTrackEnded(trackType);
trackTypeToInfo.delete(trackType); trackTypeToInfo.delete(trackType);
if (trackTypeToInfo.size() == 0) { if (trackTypeToInfo.size() == 0) {
abortScheduledExecutorService.shutdownNow(); abortScheduledExecutorService.shutdownNow();
if (!isEnded) { if (!isEnded) {
isEnded = true; isEnded = true;
listener.onEnded(Util.usToMs(maxEndedTrackTimeUs), getCurrentOutputSizeBytes()); listener.onEnded(Util.usToMs(maxEndedTrackTimeUs), getCurrentOutputSizeBytes());
Log.e("LYC", DebugTraceUtil.generateTrace());
} }
} }
} }
...@@ -298,9 +308,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -298,9 +308,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
listener.onError( listener.onError(
ExportException.createForMuxer( ExportException.createForMuxer(
new IllegalStateException( new IllegalStateException(
"No output sample written in the last " Util.formatInvariant(
+ maxDelayBetweenSamplesMs MUXER_TIMEOUT_ERROR_FORMAT_STRING,
+ " milliseconds. Aborting transformation."), maxDelayBetweenSamplesMs,
DebugTraceUtil.generateTrace())),
ExportException.ERROR_CODE_MUXING_TIMEOUT)); ExportException.ERROR_CODE_MUXING_TIMEOUT));
}, },
maxDelayBetweenSamplesMs, maxDelayBetweenSamplesMs,
......
...@@ -29,6 +29,7 @@ import com.google.android.exoplayer2.C; ...@@ -29,6 +29,7 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo; import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.Presentation; import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
...@@ -710,6 +711,7 @@ public final class Transformer { ...@@ -710,6 +711,7 @@ public final class Transformer {
HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null); HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null);
FallbackListener fallbackListener = FallbackListener fallbackListener =
new FallbackListener(composition, listeners, applicationHandler, transformationRequest); new FallbackListener(composition, listeners, applicationHandler, transformationRequest);
DebugTraceUtil.reset();
transformerInternal = transformerInternal =
new TransformerInternal( new TransformerInternal(
context, context,
......
...@@ -37,6 +37,7 @@ import androidx.annotation.VisibleForTesting; ...@@ -37,6 +37,7 @@ import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.effect.Presentation; import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.util.Consumer; import com.google.android.exoplayer2.util.Consumer;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
...@@ -289,6 +290,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -289,6 +290,7 @@ import org.checkerframework.dataflow.qual.Pure;
bufferInfo.presentationTimeUs = finalFramePresentationTimeUs; bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
} }
} }
DebugTraceUtil.recordEncodedFrame();
encoderOutputBuffer.timeUs = bufferInfo.presentationTimeUs; encoderOutputBuffer.timeUs = bufferInfo.presentationTimeUs;
encoderOutputBuffer.setFlags(bufferInfo.flags); encoderOutputBuffer.setFlags(bufferInfo.flags);
return encoderOutputBuffer; return encoderOutputBuffer;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment