Commit d2e8ac44 by claincly Committed by Ian Baker

Add a listener that is invoked once a MediaItem is fully processed

Add `VideoFrameProcessor.registerInputStream()` to signal a new type of input.

And `InputHandler.signalEndOfCurrentInputStream()` to signal partial input
stream completion to the `InputHandler`.

Here, "fully processed" means that FinalShaderProgramWrapper has released the last frame of the stream.

PiperOrigin-RevId: 527356646
parent e3fd974e
...@@ -179,6 +179,15 @@ public interface VideoFrameProcessor { ...@@ -179,6 +179,15 @@ public interface VideoFrameProcessor {
Surface getInputSurface(); Surface getInputSurface();
/** /**
* Informs the {@code VideoFrameProcessor} that a new input stream will be queued.
*
* <p>Call {@link #setInputFrameInfo} before this method if the {@link FrameInfo} of the new input
* stream differs from that of the current input stream.
*/
// TODO(b/274109008) Merge this and setInputFrameInfo.
void registerInputStream(@InputType int inputType);
/**
* Sets information about the input frames. * Sets information about the input frames.
* *
* <p>The new input information is applied from the next frame {@linkplain #registerInputFrame() * <p>The new input information is applied from the next frame {@linkplain #registerInputFrame()
......
...@@ -350,10 +350,11 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest { ...@@ -350,10 +350,11 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
checkNotNull(defaultVideoFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build()); .setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build());
// A frame needs to be registered despite not queuing any external input to ensure // A frame needs to be registered despite not queuing any external input to ensure
// that // that the video frame processor knows about the stream offset.
// the video frame processor knows about the stream offset. defaultVideoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
defaultVideoFrameProcessor.registerInputFrame(); defaultVideoFrameProcessor.registerInputFrame();
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs); blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
defaultVideoFrameProcessor.signalEndOfInput();
}); });
videoFrameProcessingEndedCountDownLatch.await(); videoFrameProcessingEndedCountDownLatch.await();
@Nullable @Nullable
......
...@@ -46,9 +46,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -46,9 +46,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private int downstreamShaderProgramCapacity; private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap; private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs; private double currentPresentationTimeUs;
private boolean inputEnded;
private boolean useHdr; private boolean useHdr;
private boolean outputEnded; private volatile boolean inputEnded;
/** /**
* Creates a new instance. * Creates a new instance.
...@@ -89,11 +88,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -89,11 +88,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
public void signalEndOfCurrentInputStream() {
// Do nothing here. End of current input signaling is handled in maybeQueueToShaderProgram().
}
@Override
public void signalEndOfInput() { public void signalEndOfInput() {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputEnded = true; if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
maybeSignalEndOfOutput(); shaderProgram.signalEndOfCurrentInputStream();
} else {
inputEnded = true;
}
}); });
} }
...@@ -118,9 +125,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -118,9 +125,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
this.useHdr = useHdr; this.useHdr = useHdr;
if (inputEnded) {
return;
}
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND)); int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
double frameDurationUs = C.MICROS_PER_SECOND / frameRate; double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd)); pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd));
...@@ -172,17 +176,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -172,17 +176,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs; currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) { if (framesToQueueForCurrentBitmap == 0) {
pendingBitmaps.remove(); pendingBitmaps.remove();
maybeSignalEndOfOutput(); if (pendingBitmaps.isEmpty() && inputEnded) {
} // Only signal end of stream after all pending bitmaps are processed.
} // TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream();
private void maybeSignalEndOfOutput() { }
if (framesToQueueForCurrentBitmap == 0
&& pendingBitmaps.isEmpty()
&& inputEnded
&& !outputEnded) {
shaderProgram.signalEndOfCurrentInputStream();
outputEnded = true;
} }
} }
......
...@@ -49,6 +49,8 @@ import com.google.common.collect.ImmutableList; ...@@ -49,6 +49,8 @@ import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.List; import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
...@@ -252,6 +254,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -252,6 +254,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final boolean releaseFramesAutomatically; private final boolean releaseFramesAutomatically;
private final FinalShaderProgramWrapper finalShaderProgramWrapper; private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms; private final ImmutableList<GlShaderProgram> allShaderPrograms;
// A queue of input streams that have not been fully processed identified by their input types.
private final Queue<@InputType Integer> unprocessedInputStreams;
@Nullable private volatile CountDownLatch latch;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo; private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded; private volatile boolean inputStreamEnded;
...@@ -270,6 +276,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -270,6 +276,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
this.eglContext = eglContext; this.eglContext = eglContext;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically; this.releaseFramesAutomatically = releaseFramesAutomatically;
this.unprocessedInputStreams = new ConcurrentLinkedQueue<>();
checkState(!shaderPrograms.isEmpty()); checkState(!shaderPrograms.isEmpty());
checkState(getLast(shaderPrograms) instanceof FinalShaderProgramWrapper); checkState(getLast(shaderPrograms) instanceof FinalShaderProgramWrapper);
...@@ -294,6 +301,22 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -294,6 +301,22 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
inputShaderProgram.setInputListener(inputHandler); inputShaderProgram.setInputListener(inputHandler);
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms); finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
finalShaderProgramWrapper.setOnInputStreamProcessedListener(
() -> {
@InputType int currentInputType = unprocessedInputStreams.remove();
if (latch != null) {
latch.countDown();
}
if (currentInputType == INPUT_TYPE_BITMAP) {
// Remove all pending bitmap input, because BitmapTextureManager signals end of input
// after all queued bitmaps are processed.
while (!unprocessedInputStreams.isEmpty()
&& checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_BITMAP) {
unprocessedInputStreams.remove();
}
}
return inputStreamEnded && unprocessedInputStreams.isEmpty();
});
allShaderPrograms = shaderPrograms; allShaderPrograms = shaderPrograms;
} }
...@@ -342,6 +365,24 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -342,6 +365,24 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
@Override @Override
public void registerInputStream(@InputType int inputType) {
if (!unprocessedInputStreams.isEmpty()) {
inputHandler.signalEndOfCurrentInputStream();
// Wait until the current video is processed before continuing to the next input.
if (checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_SURFACE) {
latch = new CountDownLatch(1);
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
Log.e(TAG, "Error waiting for end of stream " + e);
}
}
}
unprocessedInputStreams.add(inputType);
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) { public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo); nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
hasRefreshedNextInputFrameInfo = true; hasRefreshedNextInputFrameInfo = true;
...@@ -380,7 +421,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -380,7 +421,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void signalEndOfInput() { public void signalEndOfInput() {
checkState(!inputStreamEnded); checkState(!inputStreamEnded);
inputStreamEnded = true; inputStreamEnded = true;
videoFrameProcessingTaskExecutor.submit(inputHandler::signalEndOfInput); inputHandler.signalEndOfCurrentInputStream();
inputHandler.signalEndOfInput();
} }
@Override @Override
......
...@@ -57,6 +57,9 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -57,6 +57,9 @@ import java.util.concurrent.atomic.AtomicInteger;
// Read and written on the GL thread only. // Read and written on the GL thread only.
private boolean inputStreamEnded; private boolean inputStreamEnded;
// Read and written on the GL thread only.
private boolean currentInputStreamEnded;
// The frame that is sent downstream and is not done processing yet. // The frame that is sent downstream and is not done processing yet.
// Set to null on any thread. Read and set to non-null on the GL thread only. // Set to null on any thread. Read and set to non-null on the GL thread only.
@Nullable private volatile FrameInfo currentFrame; @Nullable private volatile FrameInfo currentFrame;
...@@ -135,7 +138,9 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -135,7 +138,9 @@ import java.util.concurrent.atomic.AtomicInteger;
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
currentFrame = null; currentFrame = null;
if (inputStreamEnded && pendingFrames.isEmpty()) { if (currentInputStreamEnded && pendingFrames.isEmpty()) {
// Reset because there could be further input streams after the current one ends.
currentInputStreamEnded = false;
externalShaderProgram.signalEndOfCurrentInputStream(); externalShaderProgram.signalEndOfCurrentInputStream();
} else { } else {
maybeQueueFrameToExternalShaderProgram(); maybeQueueFrameToExternalShaderProgram();
...@@ -177,17 +182,24 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -177,17 +182,24 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
@Override @Override
public void signalEndOfInput() { public void signalEndOfCurrentInputStream() {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) { if (pendingFrames.isEmpty() && currentFrame == null) {
externalShaderProgram.signalEndOfCurrentInputStream(); externalShaderProgram.signalEndOfCurrentInputStream();
} else {
currentInputStreamEnded = true;
} }
}); });
} }
@Override @Override
public void signalEndOfInput() {
// TODO(b/274109008): Consider removing the inputStreamEnded boolean.
videoFrameProcessingTaskExecutor.submit(() -> inputStreamEnded = true);
}
@Override
public void release() { public void release() {
surfaceTexture.release(); surfaceTexture.release();
surface.release(); surface.release();
......
...@@ -65,6 +65,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -65,6 +65,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/ */
/* package */ final class FinalShaderProgramWrapper implements ExternalShaderProgram { /* package */ final class FinalShaderProgramWrapper implements ExternalShaderProgram {
/** Listener interface for the current input stream ending. */
interface OnInputStreamProcessedListener {
/**
* Returns whether {@link FinalShaderProgramWrapper} should invoke {@link
* VideoFrameProcessor.Listener#signalEndOfInput}.
*/
boolean onInputStreamProcessed();
}
private static final String TAG = "FinalShaderWrapper"; private static final String TAG = "FinalShaderWrapper";
private final Context context; private final Context context;
...@@ -94,6 +103,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -94,6 +103,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation; private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation;
@Nullable private SurfaceView debugSurfaceView; @Nullable private SurfaceView debugSurfaceView;
@Nullable private GlTextureInfo outputTexture; @Nullable private GlTextureInfo outputTexture;
@Nullable private OnInputStreamProcessedListener onInputStreamProcessedListener;
private boolean frameProcessingStarted; private boolean frameProcessingStarted;
private volatile boolean outputSurfaceInfoChanged; private volatile boolean outputSurfaceInfoChanged;
...@@ -172,10 +182,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -172,10 +182,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
public void setOnInputStreamProcessedListener(
@Nullable OnInputStreamProcessedListener onInputStreamProcessedListener) {
this.onInputStreamProcessedListener = onInputStreamProcessedListener;
}
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
frameProcessingStarted = true; frameProcessingStarted = true;
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded); boolean frameProcessingEnded =
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
if (frameProcessingEnded) {
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
} }
// Methods that must be called on the GL thread. // Methods that must be called on the GL thread.
......
...@@ -70,6 +70,14 @@ import com.google.android.exoplayer2.util.VideoFrameProcessor; ...@@ -70,6 +70,14 @@ import com.google.android.exoplayer2.util.VideoFrameProcessor;
int getPendingFrameCount(); int getPendingFrameCount();
/** /**
* Signals the end of the current input stream.
*
* <p>This method must be called on the last input stream, before calling {@link
* #signalEndOfInput}.
*/
void signalEndOfCurrentInputStream();
/**
* Signals the end of the input. * Signals the end of the input.
* *
* @see VideoFrameProcessor#signalEndOfInput() * @see VideoFrameProcessor#signalEndOfInput()
......
...@@ -218,6 +218,8 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -218,6 +218,8 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get()) .setOffsetToAddUs(mediaItemOffsetUs.get())
.build()); .build());
videoFrameProcessor.registerInputStream(
MimeTypes.isVideo(trackFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP);
} }
mediaItemOffsetUs.addAndGet(durationUs); mediaItemOffsetUs.addAndGet(durationUs);
} }
......
...@@ -20,6 +20,7 @@ import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.createA ...@@ -20,6 +20,7 @@ import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.createA
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.maybeSaveTestBitmap; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE; import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
...@@ -322,6 +323,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -322,6 +323,7 @@ public final class VideoFrameProcessorTestRunner {
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
videoFrameProcessor.registerInputFrame(); videoFrameProcessor.registerInputFrame();
} }
...@@ -341,6 +343,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -341,6 +343,7 @@ public final class VideoFrameProcessorTestRunner {
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.setOffsetToAddUs(offsetToAddUs) .setOffsetToAddUs(offsetToAddUs)
.build()); .build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment