Commit 65662371 by andrewlewis Committed by tonihei

Mark methods needing to be called on GL thread

Also remove @WorkerThread annotations, as static checks associated with
this annotation aren't useful in this part of the codebase because
almost no methods are called on the main thread.

This change should be a no-op.

PiperOrigin-RevId: 512060367
parent 51f8d103
...@@ -17,7 +17,6 @@ ...@@ -17,7 +17,6 @@
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
...@@ -39,9 +38,7 @@ public interface ColorLut extends GlEffect { ...@@ -39,9 +38,7 @@ public interface ColorLut extends GlEffect {
/** Releases the OpenGL texture of the LUT. */ /** Releases the OpenGL texture of the LUT. */
void release() throws GlUtil.GlException; void release() throws GlUtil.GlException;
/** This method must be executed on the same thread as other GL commands. */
@Override @Override
@WorkerThread
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr); return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
......
...@@ -31,7 +31,6 @@ import android.opengl.GLES30; ...@@ -31,7 +31,6 @@ import android.opengl.GLES30;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting; import androidx.annotation.VisibleForTesting;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
...@@ -151,6 +150,201 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -151,6 +150,201 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
} }
// Log tag (note: retains the older "FrameProcessor" name).
private static final String TAG = "DefaultFrameProcessor";
// Name of the dedicated GL thread that runs all OpenGL commands.
private static final String THREAD_NAME = "Effect:GlThread";
// Maximum time release() waits for the GL teardown task to complete.
private static final long RELEASE_WAIT_TIME_MS = 100;
private final EGLDisplay eglDisplay;
private final EGLContext eglContext;
// Serializes all GL work onto the single GL thread.
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// Exactly one of these two managers is set (in the constructor), depending on whether input
// arrives as bitmaps (internal texture) or via a Surface (external texture).
private @MonotonicNonNull InternalTextureManager inputInternalTextureManager;
private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager;
private final boolean releaseFramesAutomatically;
// Last shader program in the chain; renders to the output surface.
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms;
/**
 * Offset compared to original media presentation time that has been added to incoming frame
 * timestamps, in microseconds.
 */
private long previousStreamOffsetUs;
// volatile: written by the input-producing thread, read elsewhere (e.g. registerInputFrame).
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
/**
 * Creates a new instance.
 *
 * <p>Private: instances are presumably created via {@code
 * createOpenGlObjectsAndFrameProcessor} on the GL thread — TODO confirm against the factory.
 *
 * @param shaderPrograms Non-empty chain whose last element must be a {@link
 *     FinalShaderProgramWrapper}; the first element consumes the input texture.
 */
private DefaultVideoFrameProcessor(
EGLDisplay eglDisplay,
EGLContext eglContext,
boolean isInputTextureExternal,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
ImmutableList<GlShaderProgram> shaderPrograms,
boolean releaseFramesAutomatically)
throws VideoFrameProcessingException {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
// The shader chain must be non-empty and terminate in the final wrapper.
checkState(!shaderPrograms.isEmpty());
checkState(getLast(shaderPrograms) instanceof FinalShaderProgramWrapper);
GlShaderProgram inputShaderProgram = shaderPrograms.get(0);
// Wire the first shader program to the matching texture manager, which feeds it input frames
// and acts as its input listener.
if (isInputTextureExternal) {
checkState(inputShaderProgram instanceof ExternalShaderProgram);
inputExternalTextureManager =
new ExternalTextureManager(
(ExternalShaderProgram) inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputExternalTextureManager);
} else {
inputInternalTextureManager =
new InternalTextureManager(inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputInternalTextureManager);
}
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
allShaderPrograms = shaderPrograms;
// No stream seen yet; the first setInputFrameInfo call appends the first stream offset.
previousStreamOffsetUs = C.TIME_UNSET;
}
/**
 * Returns the task executor that runs video frame processing tasks.
 *
 * <p>Package-private and {@link VisibleForTesting}: intended for tests that need to submit
 * tasks to the GL thread directly.
 */
@VisibleForTesting
/* package */ VideoFrameProcessingTaskExecutor getTaskExecutor() {
return videoFrameProcessingTaskExecutor;
}
/**
 * Sets the default size for input buffers, used when the producer providing input does not
 * override the buffer size.
 *
 * <p>This is unnecessary when input comes from a media codec, because the codec (producer) sets
 * the buffer size itself. When input comes from CameraX, call this after instantiation so that
 * buffers are handled at full resolution; see {@link
 * SurfaceTexture#setDefaultBufferSize(int, int)} for details.
 *
 * <p>Only call this when the {@link VideoFrameProcessor}'s {@code isInputTextureExternal}
 * parameter is {@code true}.
 *
 * @param width The default width for input buffers, in pixels.
 * @param height The default height for input buffers, in pixels.
 */
public void setInputDefaultBufferSize(int width, int height) {
  // Only valid with external-texture input; fails fast otherwise.
  ExternalTextureManager textureManager = checkNotNull(inputExternalTextureManager);
  textureManager.setDefaultBufferSize(width, height);
}
// Queues a bitmap as SDR input; only valid with internal-texture (bitmap) input.
@Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
  InternalTextureManager textureManager = checkNotNull(inputInternalTextureManager);
  textureManager.queueInputBitmap(inputBitmap, durationUs, frameRate, /* useHdr= */ false);
}
// Returns the input Surface; only valid with external-texture input.
@Override
public Surface getInputSurface() {
  ExternalTextureManager textureManager = checkNotNull(inputExternalTextureManager);
  return textureManager.getInputSurface();
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
  // Normalize the pixel aspect ratio before recording the frame info, and work on a local
  // snapshot rather than re-reading the volatile field.
  FrameInfo adjustedFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
  nextInputFrameInfo = adjustedFrameInfo;
  // A new stream offset marks the start of a new input stream.
  if (adjustedFrameInfo.streamOffsetUs != previousStreamOffsetUs) {
    finalShaderProgramWrapper.appendStream(adjustedFrameInfo.streamOffsetUs);
    previousStreamOffsetUs = adjustedFrameInfo.streamOffsetUs;
  }
}
@Override
public void registerInputFrame() {
  checkState(!inputStreamEnded);
  // Snapshot the volatile frame info after verifying it has been set.
  FrameInfo frameInfo =
      checkStateNotNull(
          nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames");
  checkNotNull(inputExternalTextureManager).registerInputFrame(frameInfo);
}
// Number of input frames registered but not yet processed; external-texture input only.
@Override
public int getPendingInputFrameCount() {
  ExternalTextureManager textureManager = checkNotNull(inputExternalTextureManager);
  return textureManager.getPendingFrameCount();
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
// Delegates to the final shader program, which owns rendering to the output surface.
finalShaderProgramWrapper.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public void releaseOutputFrame(long releaseTimeNs) {
checkState(
!releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled");
// High priority so release requests run ahead of regular queued frame-processing tasks.
videoFrameProcessingTaskExecutor.submitWithHighPriority(
() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
}
@Override
public void signalEndOfInput() {
  checkState(!inputStreamEnded);
  inputStreamEnded = true;
  // Forward end-of-input to whichever texture manager is in use (exactly one is non-null).
  InternalTextureManager internalTextureManager = inputInternalTextureManager;
  if (internalTextureManager != null) {
    videoFrameProcessingTaskExecutor.submit(internalTextureManager::signalEndOfInput);
  }
  ExternalTextureManager externalTextureManager = inputExternalTextureManager;
  if (externalTextureManager != null) {
    videoFrameProcessingTaskExecutor.submit(externalTextureManager::signalEndOfInput);
  }
}
@Override
public void flush() {
  try {
    // Drop all queued tasks first, then flush the shader chain on the GL thread and block
    // until the texture manager reports flush completion.
    videoFrameProcessingTaskExecutor.flush();
    CountDownLatch latch = new CountDownLatch(1);
    ExternalTextureManager textureManager = checkNotNull(inputExternalTextureManager);
    textureManager.setOnFlushCompleteListener(latch::countDown);
    videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
    try {
      latch.await();
    } finally {
      // Always uninstall the listener — even if await() is interrupted — so a stale
      // latch::countDown cannot fire during a later flush.
      textureManager.setOnFlushCompleteListener(null);
    }
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }
}
@Override
public void release() {
try {
// Run GL teardown on the GL thread, waiting up to RELEASE_WAIT_TIME_MS for it to finish.
videoFrameProcessingTaskExecutor.release(
/* releaseTask= */ this::releaseShaderProgramsAndDestroyGlContext, RELEASE_WAIT_TIME_MS);
} catch (InterruptedException unexpected) {
// Restore the interrupt flag before surfacing the unexpected interruption.
Thread.currentThread().interrupt();
throw new IllegalStateException(unexpected);
}
// Released outside the GL thread; only present for external-texture input.
if (inputExternalTextureManager != null) {
inputExternalTextureManager.release();
}
}
/**
 * Expands the frame based on the {@link FrameInfo#pixelWidthHeightRatio} and returns a new
 * {@link FrameInfo} instance with scaled dimensions and a {@link
 * FrameInfo#pixelWidthHeightRatio} of {@code 1}.
 */
private FrameInfo adjustForPixelWidthHeightRatio(FrameInfo frameInfo) {
  float ratio = frameInfo.pixelWidthHeightRatio;
  if (ratio > 1f) {
    // Wide pixels: stretch the width so output pixels become square.
    return new FrameInfo.Builder(frameInfo)
        .setWidth((int) (frameInfo.width * ratio))
        .setPixelWidthHeightRatio(1)
        .build();
  }
  if (ratio < 1f) {
    // Tall pixels: stretch the height instead.
    return new FrameInfo.Builder(frameInfo)
        .setHeight((int) (frameInfo.height / ratio))
        .setPixelWidthHeightRatio(1)
        .build();
  }
  // Ratio is exactly 1 (or not comparable): nothing to adjust.
  return frameInfo;
}
// Methods that must be called on the GL thread.
/** /**
* Creates the OpenGL context, surfaces, textures, and frame buffers, initializes {@link * Creates the OpenGL context, surfaces, textures, and frame buffers, initializes {@link
* GlShaderProgram} instances corresponding to the {@link GlEffect} instances, and returns a new * GlShaderProgram} instances corresponding to the {@link GlEffect} instances, and returns a new
...@@ -161,7 +355,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -161,7 +355,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
* <p>This method must be executed using the {@code singleThreadExecutorService}, as later OpenGL * <p>This method must be executed using the {@code singleThreadExecutorService}, as later OpenGL
* commands will be called on that thread. * commands will be called on that thread.
*/ */
@WorkerThread
private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor( private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor(
Context context, Context context,
List<Effect> effects, List<Effect> effects,
...@@ -349,206 +542,11 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -349,206 +542,11 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
} }
private static final String TAG = "DefaultFrameProcessor";
private static final String THREAD_NAME = "Effect:GlThread";
private static final long RELEASE_WAIT_TIME_MS = 100;
private final EGLDisplay eglDisplay;
private final EGLContext eglContext;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private @MonotonicNonNull InternalTextureManager inputInternalTextureManager;
private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager;
private final boolean releaseFramesAutomatically;
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms;
/**
* Offset compared to original media presentation time that has been added to incoming frame
* timestamps, in microseconds.
*/
private long previousStreamOffsetUs;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
private DefaultVideoFrameProcessor(
EGLDisplay eglDisplay,
EGLContext eglContext,
boolean isInputTextureExternal,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
ImmutableList<GlShaderProgram> shaderPrograms,
boolean releaseFramesAutomatically)
throws VideoFrameProcessingException {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
checkState(!shaderPrograms.isEmpty());
checkState(getLast(shaderPrograms) instanceof FinalShaderProgramWrapper);
GlShaderProgram inputShaderProgram = shaderPrograms.get(0);
if (isInputTextureExternal) {
checkState(inputShaderProgram instanceof ExternalShaderProgram);
inputExternalTextureManager =
new ExternalTextureManager(
(ExternalShaderProgram) inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputExternalTextureManager);
} else {
inputInternalTextureManager =
new InternalTextureManager(inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputInternalTextureManager);
}
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(shaderPrograms);
allShaderPrograms = shaderPrograms;
previousStreamOffsetUs = C.TIME_UNSET;
}
/** Returns the task executor that runs video frame processing tasks. */
@VisibleForTesting
/* package */ VideoFrameProcessingTaskExecutor getTaskExecutor() {
return videoFrameProcessingTaskExecutor;
}
/**
* Sets the default size for input buffers, for the case where the producer providing input does
* not override the buffer size.
*
* <p>When input comes from a media codec it's not necessary to call this method because the codec
* (producer) sets the buffer size automatically. For the case where input comes from CameraX,
* call this method after instantiation to ensure that buffers are handled at full resolution. See
* {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information.
*
* <p>This method should only be used for when the {@link VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* @param width The default width for input buffers, in pixels.
* @param height The default height for input buffers, in pixels.
*/
public void setInputDefaultBufferSize(int width, int height) {
checkNotNull(inputExternalTextureManager).setDefaultBufferSize(width, height);
}
@Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
checkNotNull(inputInternalTextureManager)
.queueInputBitmap(inputBitmap, durationUs, frameRate, /* useHdr= */ false);
}
@Override
public Surface getInputSurface() {
return checkNotNull(inputExternalTextureManager).getInputSurface();
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
if (nextInputFrameInfo.streamOffsetUs != previousStreamOffsetUs) {
finalShaderProgramWrapper.appendStream(nextInputFrameInfo.streamOffsetUs);
previousStreamOffsetUs = nextInputFrameInfo.streamOffsetUs;
}
}
@Override
public void registerInputFrame() {
checkState(!inputStreamEnded);
checkStateNotNull(
nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames");
checkNotNull(inputExternalTextureManager).registerInputFrame(nextInputFrameInfo);
}
@Override
public int getPendingInputFrameCount() {
return checkNotNull(inputExternalTextureManager).getPendingFrameCount();
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
finalShaderProgramWrapper.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public void releaseOutputFrame(long releaseTimeNs) {
checkState(
!releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled");
videoFrameProcessingTaskExecutor.submitWithHighPriority(
() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
}
@Override
public void signalEndOfInput() {
checkState(!inputStreamEnded);
inputStreamEnded = true;
if (inputInternalTextureManager != null) {
videoFrameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput);
}
if (inputExternalTextureManager != null) {
videoFrameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
}
}
@Override
public void flush() {
try {
videoFrameProcessingTaskExecutor.flush();
CountDownLatch latch = new CountDownLatch(1);
checkNotNull(inputExternalTextureManager).setOnFlushCompleteListener(latch::countDown);
videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
latch.await();
inputExternalTextureManager.setOnFlushCompleteListener(null);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@Override
public void release() {
try {
videoFrameProcessingTaskExecutor.release(
/* releaseTask= */ this::releaseShaderProgramsAndDestroyGlContext, RELEASE_WAIT_TIME_MS);
} catch (InterruptedException unexpected) {
Thread.currentThread().interrupt();
throw new IllegalStateException(unexpected);
}
if (inputExternalTextureManager != null) {
inputExternalTextureManager.release();
}
}
/**
* Expands the frame based on the {@link FrameInfo#pixelWidthHeightRatio} and returns a new {@link
* FrameInfo} instance with scaled dimensions and {@link FrameInfo#pixelWidthHeightRatio} of
* {@code 1}.
*/
private FrameInfo adjustForPixelWidthHeightRatio(FrameInfo frameInfo) {
if (frameInfo.pixelWidthHeightRatio > 1f) {
return new FrameInfo.Builder(frameInfo)
.setWidth((int) (frameInfo.width * frameInfo.pixelWidthHeightRatio))
.setPixelWidthHeightRatio(1)
.build();
} else if (frameInfo.pixelWidthHeightRatio < 1f) {
return new FrameInfo.Builder(frameInfo)
.setHeight((int) (frameInfo.height / frameInfo.pixelWidthHeightRatio))
.setPixelWidthHeightRatio(1)
.build();
} else {
return frameInfo;
}
}
/** /**
* Releases the {@link GlShaderProgram} instances and destroys the OpenGL context. * Releases the {@link GlShaderProgram} instances and destroys the OpenGL context.
* *
* <p>This method must be called on the {@linkplain #THREAD_NAME background thread}. * <p>This method must be called on the {@linkplain #THREAD_NAME background thread}.
*/ */
@WorkerThread
private void releaseShaderProgramsAndDestroyGlContext() { private void releaseShaderProgramsAndDestroyGlContext() {
try { try {
for (int i = 0; i < allShaderPrograms.size(); i++) { for (int i = 0; i < allShaderPrograms.size(); i++) {
......
...@@ -20,7 +20,6 @@ import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; ...@@ -20,7 +20,6 @@ import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlShaderProgram.InputListener; import com.google.android.exoplayer2.effect.GlShaderProgram.InputListener;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
...@@ -186,7 +185,15 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -186,7 +185,15 @@ import java.util.concurrent.atomic.AtomicInteger;
surface.release(); surface.release();
} }
@WorkerThread private void maybeExecuteAfterFlushTask() {
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
}
// Methods that must be called on the GL thread.
private void flush() { private void flush() {
// A frame that is registered before flush may arrive after flush. // A frame that is registered before flush may arrive after flush.
numberOfFramesToDropOnBecomingAvailable = pendingFrames.size() - availableFrameCount; numberOfFramesToDropOnBecomingAvailable = pendingFrames.size() - availableFrameCount;
...@@ -200,14 +207,6 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -200,14 +207,6 @@ import java.util.concurrent.atomic.AtomicInteger;
maybeExecuteAfterFlushTask(); maybeExecuteAfterFlushTask();
} }
private void maybeExecuteAfterFlushTask() {
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
}
@WorkerThread
private void maybeQueueFrameToExternalShaderProgram() { private void maybeQueueFrameToExternalShaderProgram() {
if (externalShaderProgramInputCapacity.get() == 0 if (externalShaderProgramInputCapacity.get() == 0
|| availableFrameCount == 0 || availableFrameCount == 0
......
...@@ -32,7 +32,6 @@ import android.view.SurfaceHolder; ...@@ -32,7 +32,6 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
...@@ -153,6 +152,30 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -153,6 +152,30 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
public void signalEndOfCurrentInputStream() {
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
// Pop the stream that just ended.
streamOffsetUsQueue.remove();
// When no appended streams remain, processing is complete — notify the listener.
if (streamOffsetUsQueue.isEmpty()) {
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
}
/**
 * Signals that there will be another input stream after all previously appended input streams
 * have {@linkplain #signalEndOfCurrentInputStream() ended}.
 *
 * <p>This method does not need to be called on the GL thread, but the caller must ensure that
 * stream offsets are appended in the correct order.
 *
 * @param streamOffsetUs The presentation timestamp offset, in microseconds.
 */
public void appendStream(long streamOffsetUs) {
// NOTE(review): called off the GL thread per the javadoc — confirm streamOffsetUsQueue is a
// concurrency-safe queue.
streamOffsetUsQueue.add(streamOffsetUs);
}
// Methods that must be called on the GL thread.
@Override
public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) { public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
long streamOffsetUs = long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified."); checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
...@@ -174,7 +197,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -174,7 +197,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@WorkerThread
public void releaseOutputFrame(long releaseTimeNs) { public void releaseOutputFrame(long releaseTimeNs) {
checkState(!releaseFramesAutomatically); checkState(!releaseFramesAutomatically);
Pair<TextureInfo, Long> oldestAvailableFrame = availableFrames.remove(); Pair<TextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
...@@ -185,15 +207,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -185,15 +207,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
public void signalEndOfCurrentInputStream() {
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
}
@Override
public void flush() { public void flush() {
// Drops all frames that aren't released yet. // Drops all frames that aren't released yet.
availableFrames.clear(); availableFrames.clear();
...@@ -205,19 +218,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -205,19 +218,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
@Override @Override
@WorkerThread
public synchronized void release() throws VideoFrameProcessingException {
if (defaultShaderProgram != null) {
defaultShaderProgram.release();
}
try {
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
}
@Override
public void setTextureTransformMatrix(float[] textureTransformMatrix) { public void setTextureTransformMatrix(float[] textureTransformMatrix) {
System.arraycopy( System.arraycopy(
/* src= */ textureTransformMatrix, /* src= */ textureTransformMatrix,
...@@ -231,17 +231,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -231,17 +231,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
} }
/** @Override
* Signals that there will be another input stream after all previously appended input streams public synchronized void release() throws VideoFrameProcessingException {
* have {@linkplain #signalEndOfCurrentInputStream() ended}. if (defaultShaderProgram != null) {
* defaultShaderProgram.release();
* <p>This method does not need to be called on the GL thread, but the caller must ensure that }
* stream offsets are appended in the correct order. try {
* GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
* @param streamOffsetUs The presentation timestamp offset, in microseconds. } catch (GlUtil.GlException e) {
*/ throw new VideoFrameProcessingException(e);
public void appendStream(long streamOffsetUs) { }
streamOffsetUsQueue.add(streamOffsetUs);
} }
/** /**
...@@ -495,12 +494,36 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -495,12 +494,36 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
height = surfaceView.getHeight(); height = surfaceView.getHeight();
} }
@Override
// No-op: state is picked up in surfaceChanged, which follows creation.
public void surfaceCreated(SurfaceHolder holder) {}
@Override
public synchronized void surfaceChanged(
    SurfaceHolder holder, int format, int width, int height) {
  // Record the latest surface dimensions.
  this.width = width;
  this.height = height;
  Surface newSurface = holder.getSurface();
  // Covers both "no surface yet" and "surface replaced": equals(null) is false, so the
  // negation is true when the cached surface is null.
  if (!newSurface.equals(surface)) {
    surface = newSurface;
    // Invalidate the EGL surface so it is recreated against the new surface.
    eglSurface = null;
  }
}
@Override
public synchronized void surfaceDestroyed(SurfaceHolder holder) {
// Drop all cached surface state; rendering is skipped until a new surface arrives.
surface = null;
eglSurface = null;
width = C.LENGTH_UNSET;
height = C.LENGTH_UNSET;
}
/** /**
* Focuses the wrapped surface view's surface as an {@link EGLSurface}, renders using {@code * Focuses the wrapped surface view's surface as an {@link EGLSurface}, renders using {@code
* renderingTask} and swaps buffers, if the view's holder has a valid surface. Does nothing * renderingTask} and swaps buffers, if the view's holder has a valid surface. Does nothing
* otherwise. * otherwise.
*
* <p>Must be called on the GL thread.
*/ */
@WorkerThread
public synchronized void maybeRenderToSurfaceView(VideoFrameProcessingTask renderingTask) public synchronized void maybeRenderToSurfaceView(VideoFrameProcessingTask renderingTask)
throws GlUtil.GlException, VideoFrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
if (surface == null) { if (surface == null) {
...@@ -519,28 +542,5 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -519,28 +542,5 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Prevents white flashing on the debug SurfaceView when frames are rendered too fast. // Prevents white flashing on the debug SurfaceView when frames are rendered too fast.
GLES20.glFinish(); GLES20.glFinish();
} }
@Override
public void surfaceCreated(SurfaceHolder holder) {}
@Override
public synchronized void surfaceChanged(
SurfaceHolder holder, int format, int width, int height) {
this.width = width;
this.height = height;
Surface newSurface = holder.getSurface();
if (surface == null || !surface.equals(newSurface)) {
surface = newSurface;
eglSurface = null;
}
}
@Override
public synchronized void surfaceDestroyed(SurfaceHolder holder) {
surface = null;
eglSurface = null;
width = C.LENGTH_UNSET;
height = C.LENGTH_UNSET;
}
} }
} }
...@@ -21,7 +21,6 @@ import static java.lang.Math.floor; ...@@ -21,7 +21,6 @@ import static java.lang.Math.floor;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
...@@ -34,7 +33,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -34,7 +33,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for * Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for
* consumption. * consumption.
* *
* <p>Methods in this class can be called from any thread. * <p>Public methods in this class can be called from any thread.
*/ */
/* package */ final class InternalTextureManager implements GlShaderProgram.InputListener { /* package */ final class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram; private final GlShaderProgram shaderProgram;
...@@ -99,7 +98,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -99,7 +98,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}); });
} }
@WorkerThread // Methods that must be called on the GL thread.
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr) private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
this.useHdr = useHdr; this.useHdr = useHdr;
...@@ -113,7 +113,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -113,7 +113,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
maybeQueueToShaderProgram(); maybeQueueToShaderProgram();
} }
@WorkerThread
private void maybeQueueToShaderProgram() throws VideoFrameProcessingException { private void maybeQueueToShaderProgram() throws VideoFrameProcessingException {
if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) { if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) {
return; return;
...@@ -156,7 +155,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -156,7 +155,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
} }
@WorkerThread
private void maybeSignalEndOfOutput() { private void maybeSignalEndOfOutput() {
if (framesToQueueForCurrentBitmap == 0 if (framesToQueueForCurrentBitmap == 0
&& pendingBitmaps.isEmpty() && pendingBitmaps.isEmpty()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment