Commit fd3e10ed by olly Committed by kim-vde

Fix video frame processing offset (VPFO) counter reporting

DecoderCounters are reset in onEnabled, but the way the local
counters in MediaCodecVideoRenderer were reset assumed that the
DecoderCounters were reset in onStarted.

PiperOrigin-RevId: 320440991
parent 351b54e9
...@@ -515,22 +515,17 @@ public interface AnalyticsListener { ...@@ -515,22 +515,17 @@ public interface AnalyticsListener {
* Called when there is an update to the video frame processing offset reported by a video * Called when there is an update to the video frame processing offset reported by a video
* renderer. * renderer.
* *
* <p>Video processing offset represents how early a video frame is processed compared to the * <p>The processing offset for a video frame is the difference between the time at which the
* player's current position. For each video frame, the offset is calculated as <em>P<sub>vf</sub> * frame became available to render, and the time at which it was scheduled to be rendered. A
* - P<sub>pl</sub></em> where <em>P<sub>vf</sub></em> is the presentation timestamp of the video * positive value indicates the frame became available early enough, whereas a negative value
* frame and <em>P<sub>pl</sub></em> is the current position of the player. Positive values * indicates that the frame wasn't available until after the time at which it should have been
* indicate the frame was processed early enough whereas negative values indicate that the * rendered.
* player's position had progressed beyond the frame's timestamp when the frame was processed (and *
* the frame was probably dropped). * @param eventTime The event time.
* * @param totalProcessingOffsetUs The sum of the video frame processing offsets for frames
* <p>The renderer reports the sum of video processing offset samples (one sample per processed * rendered since the last call to this method.
* video frame: dropped, skipped or rendered) and the total number of samples (frames). * @param frameCount The number of samples included in {@code totalProcessingOffsetUs}.
* * @param format The video {@link Format} being rendered.
* @param eventTime The event time.
* @param totalProcessingOffsetUs The sum of video frame processing offset samples for all video
* frames processed by the renderer in microseconds.
* @param frameCount The number of samples included in the {@code totalProcessingOffsetUs}. * @param format The video {@link Format} being rendered.
* @param format The current output {@link Format} rendered by the video renderer.
*/ */
default void onVideoFrameProcessingOffset( default void onVideoFrameProcessingOffset(
EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {} EventTime eventTime, long totalProcessingOffsetUs, int frameCount, Format format) {}
......
...@@ -74,19 +74,22 @@ public final class DecoderCounters { ...@@ -74,19 +74,22 @@ public final class DecoderCounters {
*/ */
public int droppedToKeyframeCount; public int droppedToKeyframeCount;
/** /**
* The sum of video frame processing offset samples in microseconds. * The sum of the video frame processing offsets in microseconds.
* *
* <p>Video frame processing offset measures how early a video frame was processed by a video * <p>The processing offset for a video frame is the difference between the time at which the
* renderer compared to the player's current position. * frame became available to render, and the time at which it was scheduled to be rendered. A
* positive value indicates the frame became available early enough, whereas a negative value
* indicates that the frame wasn't available until after the time at which it should have been
* rendered.
* *
* <p>Note: Use {@link #addVideoFrameProcessingOffsetSample(long)} to update this field instead of * <p>Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of
* updating it directly. * updating it directly.
*/ */
public long totalVideoFrameProcessingOffsetUs; public long totalVideoFrameProcessingOffsetUs;
/** /**
* The number of video frame processing offset samples added. * The number of video frame processing offsets added.
* *
* <p>Note: Use {@link #addVideoFrameProcessingOffsetSample(long)} to update this field instead of * <p>Note: Use {@link #addVideoFrameProcessingOffset(long)} to update this field instead of
* updating it directly. * updating it directly.
*/ */
public int videoFrameProcessingOffsetCount; public int videoFrameProcessingOffsetCount;
...@@ -117,25 +120,24 @@ public final class DecoderCounters { ...@@ -117,25 +120,24 @@ public final class DecoderCounters {
maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount, maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount,
other.maxConsecutiveDroppedBufferCount); other.maxConsecutiveDroppedBufferCount);
droppedToKeyframeCount += other.droppedToKeyframeCount; droppedToKeyframeCount += other.droppedToKeyframeCount;
addVideoFrameProcessingOffsets(
addVideoFrameProcessingOffsetSamples(
other.totalVideoFrameProcessingOffsetUs, other.videoFrameProcessingOffsetCount); other.totalVideoFrameProcessingOffsetUs, other.videoFrameProcessingOffsetCount);
} }
/** /**
* Adds a video frame processing offset sample to {@link #totalVideoFrameProcessingOffsetUs} and * Adds a video frame processing offset to {@link #totalVideoFrameProcessingOffsetUs} and
* increases {@link #videoFrameProcessingOffsetCount} by one. * increases {@link #videoFrameProcessingOffsetCount} by one.
* *
* <p>Convenience method to ensure both fields are updated when adding a sample. * <p>Convenience method to ensure both fields are updated when adding a single offset.
* *
* @param sampleUs The sample in microseconds. * @param processingOffsetUs The video frame processing offset in microseconds.
*/ */
public void addVideoFrameProcessingOffsetSample(long sampleUs) { public void addVideoFrameProcessingOffset(long processingOffsetUs) {
addVideoFrameProcessingOffsetSamples(sampleUs, /* count= */ 1); addVideoFrameProcessingOffsets(processingOffsetUs, /* count= */ 1);
} }
private void addVideoFrameProcessingOffsetSamples(long sampleUs, int count) { private void addVideoFrameProcessingOffsets(long totalProcessingOffsetUs, int count) {
totalVideoFrameProcessingOffsetUs += sampleUs; totalVideoFrameProcessingOffsetUs += totalProcessingOffsetUs;
videoFrameProcessingOffsetCount += count; videoFrameProcessingOffsetCount += count;
} }
} }
...@@ -40,6 +40,7 @@ import com.google.android.exoplayer2.Format; ...@@ -40,6 +40,7 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter; import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
...@@ -786,7 +787,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -786,7 +787,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Skip frames in sync with playback, so we'll be at the right frame if the mode changes. // Skip frames in sync with playback, so we'll be at the right frame if the mode changes.
if (isBufferLate(earlyUs)) { if (isBufferLate(earlyUs)) {
skipOutputBuffer(codec, bufferIndex, presentationTimeUs); skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs); updateVideoFrameProcessingOffsetCounters(earlyUs);
return true; return true;
} }
return false; return false;
...@@ -813,7 +814,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -813,7 +814,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} else { } else {
renderOutputBuffer(codec, bufferIndex, presentationTimeUs); renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
} }
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs); updateVideoFrameProcessingOffsetCounters(earlyUs);
return true; return true;
} }
...@@ -846,7 +847,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -846,7 +847,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} else { } else {
dropOutputBuffer(codec, bufferIndex, presentationTimeUs); dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
} }
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs); updateVideoFrameProcessingOffsetCounters(earlyUs);
return true; return true;
} }
...@@ -856,7 +857,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -856,7 +857,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
notifyFrameMetadataListener( notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat); presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs); renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs); updateVideoFrameProcessingOffsetCounters(earlyUs);
return true; return true;
} }
} else { } else {
...@@ -876,7 +877,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -876,7 +877,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
notifyFrameMetadataListener( notifyFrameMetadataListener(
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat); presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
renderOutputBuffer(codec, bufferIndex, presentationTimeUs); renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
decoderCounters.addVideoFrameProcessingOffsetSample(earlyUs); updateVideoFrameProcessingOffsetCounters(earlyUs);
return true; return true;
} }
} }
...@@ -1032,8 +1033,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1032,8 +1033,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
/** /**
* Updates decoder counters to reflect that {@code droppedBufferCount} additional buffers were * Updates local counters and {@link DecoderCounters} to reflect that {@code droppedBufferCount}
* dropped. * additional buffers were dropped.
* *
* @param droppedBufferCount The number of additional dropped buffers. * @param droppedBufferCount The number of additional dropped buffers.
*/ */
...@@ -1049,6 +1050,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1049,6 +1050,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
/** /**
* Updates local counters and {@link DecoderCounters} with a new video frame processing offset.
*
* @param processingOffsetUs The video frame processing offset in microseconds.
*/
protected void updateVideoFrameProcessingOffsetCounters(long processingOffsetUs) {
// Update the shared DecoderCounters (per the commit message, these are reset in onEnabled).
decoderCounters.addVideoFrameProcessingOffset(processingOffsetUs);
// Also accumulate into the renderer-local counters, which are zeroed each time they are
// reported via maybeNotifyVideoFrameProcessingOffset, so the two reset lifecycles stay
// independent.
totalVideoFrameProcessingOffsetUs += processingOffsetUs;
videoFrameProcessingOffsetCount++;
}
/**
* Renders the output buffer with the specified index. This method is only called if the platform * Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is less than 21. * API version of the device is less than 21.
* *
...@@ -1215,18 +1227,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1215,18 +1227,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
private void maybeNotifyVideoFrameProcessingOffset() { private void maybeNotifyVideoFrameProcessingOffset() {
Format outputFormat = getCurrentOutputFormat(); @Nullable Format outputFormat = getCurrentOutputFormat();
if (outputFormat != null) { if (outputFormat != null && videoFrameProcessingOffsetCount != 0) {
long totalOffsetDelta =
decoderCounters.totalVideoFrameProcessingOffsetUs - totalVideoFrameProcessingOffsetUs;
int countDelta =
decoderCounters.videoFrameProcessingOffsetCount - videoFrameProcessingOffsetCount;
if (countDelta != 0) {
eventDispatcher.reportVideoFrameProcessingOffset( eventDispatcher.reportVideoFrameProcessingOffset(
totalOffsetDelta, countDelta, outputFormat); totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount, outputFormat);
totalVideoFrameProcessingOffsetUs = decoderCounters.totalVideoFrameProcessingOffsetUs; totalVideoFrameProcessingOffsetUs = 0;
videoFrameProcessingOffsetCount = decoderCounters.videoFrameProcessingOffsetCount; videoFrameProcessingOffsetCount = 0;
}
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment