Commit b9205746 by claincly Committed by Rohit Singh

Remove setters for streamOffset.

In addition to the changes in https://github.com/google/ExoPlayer/commit/b18fb368cca9843aeca2cc4d5a01aa4fa41b4bd7

This change essentially reverts https://github.com/google/ExoPlayer/commit/30e5bc9837e2423cd2bb426c5797211e0f6ad76b (Merged Jul 2022).

From this CL on, `VideoFrameProcessor` takes in non-offset, monotonically
increasing timestamps. For example, with one 5s and one 10s video,

- `VideoFrameProcessor`'s input should start from 0
- On switching to the second video (10s), the timestamp of the first frame in
  the second video should be at 5s.

In ExoPlayer however, `streamOffset` is managed differently and thus needs
correction before sending the frames to `VideoFrameProcessor`:
- The timestamp of the first video is offset by a large int, so the first frame
  of the first media item has timestamp (assuming) 10000000000000000
- The last frame of the first media item has 10000005000000000
- At this point the stream offset is updated to 10000005000000000
- The pts of the first frame of the second video starts from 0 again.

PiperOrigin-RevId: 523444236
parent fd9beb68
...@@ -28,7 +28,6 @@ public class FrameInfo { ...@@ -28,7 +28,6 @@ public class FrameInfo {
private int width; private int width;
private int height; private int height;
private float pixelWidthHeightRatio; private float pixelWidthHeightRatio;
private long streamOffsetUs;
private long offsetToAddUs; private long offsetToAddUs;
/** /**
...@@ -48,7 +47,6 @@ public class FrameInfo { ...@@ -48,7 +47,6 @@ public class FrameInfo {
width = frameInfo.width; width = frameInfo.width;
height = frameInfo.height; height = frameInfo.height;
pixelWidthHeightRatio = frameInfo.pixelWidthHeightRatio; pixelWidthHeightRatio = frameInfo.pixelWidthHeightRatio;
streamOffsetUs = frameInfo.streamOffsetUs;
offsetToAddUs = frameInfo.offsetToAddUs; offsetToAddUs = frameInfo.offsetToAddUs;
} }
...@@ -78,17 +76,6 @@ public class FrameInfo { ...@@ -78,17 +76,6 @@ public class FrameInfo {
} }
/** /**
* Sets the {@linkplain FrameInfo#streamOffsetUs stream offset}, in microseconds.
*
* <p>The default value is {@code 0}.
*/
@CanIgnoreReturnValue
public Builder setStreamOffsetUs(long streamOffsetUs) {
this.streamOffsetUs = streamOffsetUs;
return this;
}
/**
* Sets the {@linkplain FrameInfo#offsetToAddUs offset to add} to the frame presentation * Sets the {@linkplain FrameInfo#offsetToAddUs offset to add} to the frame presentation
* timestamp, in microseconds. * timestamp, in microseconds.
* *
...@@ -102,7 +89,7 @@ public class FrameInfo { ...@@ -102,7 +89,7 @@ public class FrameInfo {
/** Builds a {@link FrameInfo} instance. */ /** Builds a {@link FrameInfo} instance. */
public FrameInfo build() { public FrameInfo build() {
return new FrameInfo(width, height, pixelWidthHeightRatio, streamOffsetUs, offsetToAddUs); return new FrameInfo(width, height, pixelWidthHeightRatio, offsetToAddUs);
} }
} }
...@@ -113,16 +100,6 @@ public class FrameInfo { ...@@ -113,16 +100,6 @@ public class FrameInfo {
/** The ratio of width over height for each pixel. */ /** The ratio of width over height for each pixel. */
public final float pixelWidthHeightRatio; public final float pixelWidthHeightRatio;
/** /**
* An offset in microseconds that is part of the input timestamps and should be ignored for
* processing but added back to the output timestamps.
*
* <p>The offset stays constant within a stream. If the first timestamp of the next stream is less
* than or equal to the last timestamp of the current stream (including the {@linkplain
* #offsetToAddUs} offset to add), the stream offset must be updated between the streams to ensure
* that the offset frame timestamps are always monotonically increasing.
*/
public final long streamOffsetUs;
/**
* The offset that must be added to the frame presentation timestamp, in microseconds. * The offset that must be added to the frame presentation timestamp, in microseconds.
* *
* <p>This offset is not part of the input timestamps. It is added to the frame timestamps before * <p>This offset is not part of the input timestamps. It is added to the frame timestamps before
...@@ -132,15 +109,13 @@ public class FrameInfo { ...@@ -132,15 +109,13 @@ public class FrameInfo {
// TODO(b/227624622): Add color space information for HDR. // TODO(b/227624622): Add color space information for HDR.
private FrameInfo( private FrameInfo(int width, int height, float pixelWidthHeightRatio, long offsetToAddUs) {
int width, int height, float pixelWidthHeightRatio, long streamOffsetUs, long offsetToAddUs) {
checkArgument(width > 0, "width must be positive, but is: " + width); checkArgument(width > 0, "width must be positive, but is: " + width);
checkArgument(height > 0, "height must be positive, but is: " + height); checkArgument(height > 0, "height must be positive, but is: " + height);
this.width = width; this.width = width;
this.height = height; this.height = height;
this.pixelWidthHeightRatio = pixelWidthHeightRatio; this.pixelWidthHeightRatio = pixelWidthHeightRatio;
this.streamOffsetUs = streamOffsetUs;
this.offsetToAddUs = offsetToAddUs; this.offsetToAddUs = offsetToAddUs;
} }
} }
...@@ -171,10 +171,6 @@ public interface VideoFrameProcessor { ...@@ -171,10 +171,6 @@ public interface VideoFrameProcessor {
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output * <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1. * frames' pixels have a ratio of 1.
* *
* <p>The caller should update {@link FrameInfo#streamOffsetUs} when switching to an input stream
* whose first frame timestamp is less than or equal to the last timestamp received. This stream
* offset should ensure that frame timestamps are monotonically increasing.
*
* <p>Can be called on any thread. * <p>Can be called on any thread.
*/ */
void setInputFrameInfo(FrameInfo inputFrameInfo); void setInputFrameInfo(FrameInfo inputFrameInfo);
......
...@@ -963,7 +963,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -963,7 +963,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException { protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException {
if (!videoFrameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.maybeEnable(format); videoFrameProcessorManager.maybeEnable(format, getOutputStreamOffsetUs());
} }
} }
...@@ -1526,7 +1526,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1526,7 +1526,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// input surface, which is not a SurfaceView. // input surface, which is not a SurfaceView.
long releaseTimeNs = long releaseTimeNs =
videoFrameProcessorManager.isEnabled() videoFrameProcessorManager.isEnabled()
? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000 ? videoFrameProcessorManager.getCorrectedFramePresentationTimeUs(
presentationTimeUs, getOutputStreamOffsetUs())
* 1000
: System.nanoTime(); : System.nanoTime();
if (notifyFrameMetadataListener) { if (notifyFrameMetadataListener) {
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format); notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
...@@ -1889,6 +1891,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1889,6 +1891,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** The presentation time, after which the listener should be notified about the size change. */ /** The presentation time, after which the listener should be notified about the size change. */
private long pendingOutputSizeChangeNotificationTimeUs; private long pendingOutputSizeChangeNotificationTimeUs;
private long initialStreamOffsetUs;
/** Creates a new instance. */ /** Creates a new instance. */
public VideoFrameProcessorManager( public VideoFrameProcessorManager(
VideoFrameReleaseHelper frameReleaseHelper, VideoFrameReleaseHelper frameReleaseHelper,
...@@ -1902,6 +1906,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1902,6 +1906,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
lastCodecBufferPresentationTimestampUs = C.TIME_UNSET; lastCodecBufferPresentationTimestampUs = C.TIME_UNSET;
processedFrameSize = VideoSize.UNKNOWN; processedFrameSize = VideoSize.UNKNOWN;
pendingOutputSizeChangeNotificationTimeUs = C.TIME_UNSET; pendingOutputSizeChangeNotificationTimeUs = C.TIME_UNSET;
initialStreamOffsetUs = C.TIME_UNSET;
} }
/** Sets the {@linkplain Effect video effects}. */ /** Sets the {@linkplain Effect video effects}. */
...@@ -1961,7 +1966,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -1961,7 +1966,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed. * @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException { public boolean maybeEnable(Format inputFormat, long initialStreamOffsetUs)
throws ExoPlaybackException {
checkState(!isEnabled()); checkState(!isEnabled());
if (!canEnableFrameProcessing) { if (!canEnableFrameProcessing) {
return false; return false;
...@@ -2057,6 +2063,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2057,6 +2063,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException(); throw new IllegalStateException();
} }
}); });
this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) { } catch (Exception e) {
throw renderer.createRendererException( throw renderer.createRendererException(
e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED); e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
...@@ -2075,6 +2082,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2075,6 +2082,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return true; return true;
} }
public long getCorrectedFramePresentationTimeUs(
long framePresentationTimeUs, long currentStreamOffsetUs) {
// VideoFrameProcessor takes in frames with monotonically increasing, non-offset frame
// timestamps. That is, with two ten-second long videos, the first frame of the second video
// should bear a timestamp of 10s seen from VideoFrameProcessor; while in ExoPlayer, the
// timestamp of the said frame would be 0s, but the streamOffset is incremented by 10s to include
// the duration of the first video. Thus this correction is needed to account for the difference
// in handling of presentation timestamps in ExoPlayer and VideoFrameProcessor.
checkState(initialStreamOffsetUs != C.TIME_UNSET);
return framePresentationTimeUs + currentStreamOffsetUs - initialStreamOffsetUs;
}
/** /**
* Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link * Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link
* VideoFrameProcessor}. * VideoFrameProcessor}.
...@@ -2129,7 +2148,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2129,7 +2148,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
.setInputFrameInfo( .setInputFrameInfo(
new FrameInfo.Builder(inputFormat.width, inputFormat.height) new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
.setStreamOffsetUs(renderer.getOutputStreamOffsetUs())
.build()); .build());
this.inputFormat = inputFormat; this.inputFormat = inputFormat;
...@@ -2206,7 +2224,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2206,7 +2224,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
checkStateNotNull(videoFrameProcessor); checkStateNotNull(videoFrameProcessor);
while (!processedFramesTimestampsUs.isEmpty()) { while (!processedFramesTimestampsUs.isEmpty()) {
boolean isStarted = renderer.getState() == STATE_STARTED; boolean isStarted = renderer.getState() == STATE_STARTED;
long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek()); long framePresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
long bufferPresentationTimeUs = framePresentationTimeUs + initialStreamOffsetUs;
long earlyUs = long earlyUs =
renderer.calculateEarlyTimeUs( renderer.calculateEarlyTimeUs(
positionUs, positionUs,
...@@ -2248,8 +2267,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2248,8 +2267,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
&& bufferPresentationTimeUs > pendingFrameFormats.peek().first) { && bufferPresentationTimeUs > pendingFrameFormats.peek().first) {
currentFrameFormat = pendingFrameFormats.remove(); currentFrameFormat = pendingFrameFormats.remove();
} }
long framePresentationTimeUs =
bufferPresentationTimeUs - renderer.getOutputStreamOffsetUs();
renderer.notifyFrameMetadataListener( renderer.notifyFrameMetadataListener(
framePresentationTimeUs, adjustedFrameReleaseTimeNs, currentFrameFormat.second); framePresentationTimeUs, adjustedFrameReleaseTimeNs, currentFrameFormat.second);
if (pendingOutputSizeChangeNotificationTimeUs >= bufferPresentationTimeUs) { if (pendingOutputSizeChangeNotificationTimeUs >= bufferPresentationTimeUs) {
......
...@@ -256,12 +256,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -256,12 +256,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final FinalShaderProgramWrapper finalShaderProgramWrapper; private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms; private final ImmutableList<GlShaderProgram> allShaderPrograms;
/**
* Offset compared to original media presentation time that has been added to incoming frame
* timestamps, in microseconds.
*/
private long previousStreamOffsetUs;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo; private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded; private volatile boolean inputStreamEnded;
...@@ -298,7 +292,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -298,7 +292,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms); finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
allShaderPrograms = shaderPrograms; allShaderPrograms = shaderPrograms;
previousStreamOffsetUs = C.TIME_UNSET;
} }
/** Returns the task executor that runs video frame processing tasks. */ /** Returns the task executor that runs video frame processing tasks. */
...@@ -340,11 +333,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -340,11 +333,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override @Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) { public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo); nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
if (nextInputFrameInfo.streamOffsetUs != previousStreamOffsetUs) {
finalShaderProgramWrapper.appendStream(nextInputFrameInfo.streamOffsetUs);
previousStreamOffsetUs = nextInputFrameInfo.streamOffsetUs;
}
} }
@Override @Override
......
...@@ -63,8 +63,6 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -63,8 +63,6 @@ import java.util.concurrent.atomic.AtomicInteger;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead. // TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask; @Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs;
/** /**
* Creates a new instance. * Creates a new instance.
* *
...@@ -90,7 +88,6 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -90,7 +88,6 @@ import java.util.concurrent.atomic.AtomicInteger;
textureTransformMatrix = new float[16]; textureTransformMatrix = new float[16];
pendingFrames = new ConcurrentLinkedQueue<>(); pendingFrames = new ConcurrentLinkedQueue<>();
externalShaderProgramInputCapacity = new AtomicInteger(); externalShaderProgramInputCapacity = new AtomicInteger();
previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener( surfaceTexture.setOnFrameAvailableListener(
unused -> unused ->
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
...@@ -225,15 +222,8 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -225,15 +222,8 @@ import java.util.concurrent.atomic.AtomicInteger;
externalShaderProgram.setTextureTransformMatrix(textureTransformMatrix); externalShaderProgram.setTextureTransformMatrix(textureTransformMatrix);
long frameTimeNs = surfaceTexture.getTimestamp(); long frameTimeNs = surfaceTexture.getTimestamp();
long offsetToAddUs = currentFrame.offsetToAddUs; long offsetToAddUs = currentFrame.offsetToAddUs;
long streamOffsetUs = currentFrame.streamOffsetUs;
if (streamOffsetUs != previousStreamOffsetUs) {
if (previousStreamOffsetUs != C.TIME_UNSET) {
externalShaderProgram.signalEndOfCurrentInputStream();
}
previousStreamOffsetUs = streamOffsetUs;
}
// Correct the presentation time so that GlShaderPrograms don't see the stream offset. // Correct the presentation time so that GlShaderPrograms don't see the stream offset.
long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs - streamOffsetUs; long presentationTimeUs = (frameTimeNs / 1000) + offsetToAddUs;
externalShaderProgram.queueInputFrame( externalShaderProgram.queueInputFrame(
new GlTextureInfo( new GlTextureInfo(
externalTexId, externalTexId,
......
...@@ -17,7 +17,6 @@ package com.google.android.exoplayer2.effect; ...@@ -17,7 +17,6 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.content.Context; import android.content.Context;
import android.opengl.EGL14; import android.opengl.EGL14;
...@@ -81,7 +80,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -81,7 +80,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final Executor videoFrameProcessorListenerExecutor; private final Executor videoFrameProcessorListenerExecutor;
private final VideoFrameProcessor.Listener videoFrameProcessorListener; private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final float[] textureTransformMatrix; private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<GlTextureInfo, Long>> availableFrames; private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
@Nullable private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener; @Nullable private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
...@@ -141,7 +139,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -141,7 +139,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.textureOutputListener = textureOutputListener; this.textureOutputListener = textureOutputListener;
textureTransformMatrix = GlUtil.create4x4IdentityMatrix(); textureTransformMatrix = GlUtil.create4x4IdentityMatrix();
streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
inputListener = new InputListener() {}; inputListener = new InputListener() {};
availableFrames = new ConcurrentLinkedQueue<>(); availableFrames = new ConcurrentLinkedQueue<>();
} }
...@@ -175,42 +172,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -175,42 +172,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
frameProcessingStarted = true; frameProcessingStarted = true;
if (streamOffsetUsQueue.isEmpty()) {
// No input stream to end.
return;
}
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded); videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
} }
}
/**
* Signals that there will be another input stream after all previously appended input streams
* have {@linkplain #signalEndOfCurrentInputStream() ended}.
*
* <p>This method does not need to be called on the GL thread, but the caller must ensure that
* stream offsets are appended in the correct order.
*
* @param streamOffsetUs The presentation timestamp offset, in microseconds.
*/
public void appendStream(long streamOffsetUs) {
streamOffsetUsQueue.add(streamOffsetUs);
}
// Methods that must be called on the GL thread. // Methods that must be called on the GL thread.
@Override @Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) { public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
frameProcessingStarted = true; frameProcessingStarted = true;
long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs;
videoFrameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs)); () -> videoFrameProcessorListener.onOutputFrameAvailable(presentationTimeUs));
if (releaseFramesAutomatically) { if (releaseFramesAutomatically) {
renderFrame( renderFrame(inputTexture, presentationTimeUs, /* releaseTimeNs= */ presentationTimeUs * 1000);
inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
} else { } else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs)); availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
} }
......
...@@ -179,9 +179,7 @@ public interface GlShaderProgram { ...@@ -179,9 +179,7 @@ public interface GlShaderProgram {
* Notifies the {@code GlShaderProgram} that no further input frames belonging to the current * Notifies the {@code GlShaderProgram} that no further input frames belonging to the current
* input stream will be queued. * input stream will be queued.
* *
* <p>Input frames that are queued after this method is called belong to a different input stream, * <p>Input frames that are queued after this method is called belong to a different input stream.
* so presentation timestamps may reset to start from a smaller presentation timestamp than the
* last frame of the previous input stream.
*/ */
void signalEndOfCurrentInputStream(); void signalEndOfCurrentInputStream();
......
...@@ -52,6 +52,7 @@ import java.util.concurrent.Executor; ...@@ -52,6 +52,7 @@ import java.util.concurrent.Executor;
@Override @Override
public void setInputListener(InputListener inputListener) { public void setInputListener(InputListener inputListener) {
// TODO(b/277726418) Fix over-reported input capacity.
copyGlShaderProgram.setInputListener(inputListener); copyGlShaderProgram.setInputListener(inputListener);
wrappedGlShaderProgram.setInputListener(inputListener); wrappedGlShaderProgram.setInputListener(inputListener);
} }
...@@ -76,6 +77,8 @@ import java.util.concurrent.Executor; ...@@ -76,6 +77,8 @@ import java.util.concurrent.Executor;
@Override @Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) { public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
// TODO(b/277726418) Properly report shader program capacity when switching from wrapped shader
// program to copying shader program.
if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) { if (presentationTimeUs >= startTimeUs && presentationTimeUs <= endTimeUs) {
pendingWrappedGlShaderProgramFrames++; pendingWrappedGlShaderProgramFrames++;
wrappedGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs); wrappedGlShaderProgram.queueInputFrame(inputTexture, presentationTimeUs);
...@@ -100,7 +103,10 @@ import java.util.concurrent.Executor; ...@@ -100,7 +103,10 @@ import java.util.concurrent.Executor;
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
copyGlShaderProgram.signalEndOfCurrentInputStream(); // TODO(b/277726418) Properly handle EOS reporting.
// Only sending EOS signal along the wrapped GL shader program path is semantically incorrect,
// but it ensures the wrapped shader program receives the EOS signal. On the other hand, the
// copy shader program does not need special EOS handling.
wrappedGlShaderProgram.signalEndOfCurrentInputStream(); wrappedGlShaderProgram.signalEndOfCurrentInputStream();
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment