Commit 413f61b9 by huangdarwin Committed by christosts

Effect: Rename FrameProcessor

Rename FrameProcessor to VideoFrameProcessor, and GlEffectsFrameProcessor to
DefaultVideoFrameProcessor.

Most changes are a semi-mechanical, semi-manual find-and-replace, preserving case:
* "FrameProc" -> "VideoFrameProc" (ex. FrameProcessor -> VideoFrameProcessor, and
   FrameProcessingException -> VideoFrameProcessingException)
* "GlEffectsVideoFrameProc" -> "DefaultVideoFrameProc"

PiperOrigin-RevId: 509887384
parent 3e5ae92b
Showing 484 additions and 471 deletions
...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument; ...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import com.google.android.exoplayer2.effect.SingleFrameGlShaderProgram; import com.google.android.exoplayer2.effect.SingleFrameGlShaderProgram;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
/** /**
...@@ -59,7 +59,7 @@ import java.io.IOException; ...@@ -59,7 +59,7 @@ import java.io.IOException;
* @param minInnerRadius The lower bound of the radius that is unaffected by the effect. * @param minInnerRadius The lower bound of the radius that is unaffected by the effect.
* @param maxInnerRadius The upper bound of the radius that is unaffected by the effect. * @param maxInnerRadius The upper bound of the radius that is unaffected by the effect.
* @param outerRadius The radius after which all pixels are black. * @param outerRadius The radius after which all pixels are black.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public PeriodicVignetteShaderProgram( public PeriodicVignetteShaderProgram(
Context context, Context context,
...@@ -69,7 +69,7 @@ import java.io.IOException; ...@@ -69,7 +69,7 @@ import java.io.IOException;
float minInnerRadius, float minInnerRadius,
float maxInnerRadius, float maxInnerRadius,
float outerRadius) float outerRadius)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
checkArgument(minInnerRadius <= maxInnerRadius); checkArgument(minInnerRadius <= maxInnerRadius);
checkArgument(maxInnerRadius <= outerRadius); checkArgument(maxInnerRadius <= outerRadius);
...@@ -78,7 +78,7 @@ import java.io.IOException; ...@@ -78,7 +78,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY}); glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY});
glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius}); glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius});
...@@ -95,7 +95,8 @@ import java.io.IOException; ...@@ -95,7 +95,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
...@@ -107,17 +108,17 @@ import java.io.IOException; ...@@ -107,17 +108,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }
...@@ -26,9 +26,9 @@ import androidx.annotation.Nullable; ...@@ -26,9 +26,9 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlShaderProgram; import com.google.android.exoplayer2.effect.GlShaderProgram;
import com.google.android.exoplayer2.effect.TextureInfo; import com.google.android.exoplayer2.effect.TextureInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.LibraryLoader; import com.google.android.exoplayer2.util.LibraryLoader;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import com.google.mediapipe.components.FrameProcessor; import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.framework.AppTextureFrame; import com.google.mediapipe.framework.AppTextureFrame;
...@@ -112,7 +112,7 @@ import java.util.concurrent.Future; ...@@ -112,7 +112,7 @@ import java.util.concurrent.Future;
futures = new ArrayDeque<>(); futures = new ArrayDeque<>();
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {}; errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext()); EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext());
frameProcessor = frameProcessor =
...@@ -155,7 +155,7 @@ import java.util.concurrent.Future; ...@@ -155,7 +155,7 @@ import java.util.concurrent.Future;
frameProcessor.setAsynchronousErrorListener( frameProcessor.setAsynchronousErrorListener(
error -> error ->
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(error)))); () -> errorListener.onError(new VideoFrameProcessingException(error))));
} }
@Override @Override
...@@ -191,7 +191,7 @@ import java.util.concurrent.Future; ...@@ -191,7 +191,7 @@ import java.util.concurrent.Future;
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
if (acceptedFrame) { if (acceptedFrame) {
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
...@@ -213,9 +213,7 @@ import java.util.concurrent.Future; ...@@ -213,9 +213,7 @@ import java.util.concurrent.Future;
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
if (errorListener != null) { if (errorListener != null) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () -> errorListener.onError(new VideoFrameProcessingException(e)));
errorListener.onFrameProcessingError(
new FrameProcessingException(e)));
} }
} }
} }
...@@ -254,14 +252,12 @@ import java.util.concurrent.Future; ...@@ -254,14 +252,12 @@ import java.util.concurrent.Future;
try { try {
if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) { if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () -> errorListener.onError(new VideoFrameProcessingException("Release timed out")));
errorListener.onFrameProcessingError(
new FrameProcessingException("Release timed out")));
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
frameProcessor.close(); frameProcessor.close();
...@@ -294,11 +290,11 @@ import java.util.concurrent.Future; ...@@ -294,11 +290,11 @@ import java.util.concurrent.Future;
futures.remove().get(); futures.remove().get();
} catch (ExecutionException e) { } catch (ExecutionException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
} }
} }
......
...@@ -229,10 +229,10 @@ public class PlaybackException extends Exception implements Bundleable { ...@@ -229,10 +229,10 @@ public class PlaybackException extends Exception implements Bundleable {
// Frame processing errors (7xxx). // Frame processing errors (7xxx).
/** Caused by a failure when initializing a {@link FrameProcessor}. */ /** Caused by a failure when initializing a {@link VideoFrameProcessor}. */
public static final int ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED = 7000; public static final int ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a frame. */ /** Caused by a failure when processing a video frame. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 7001; public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 7001;
/** /**
* Player implementations that want to surface custom errors can use error codes greater than this * Player implementations that want to surface custom errors can use error codes greater than this
...@@ -311,10 +311,10 @@ public class PlaybackException extends Exception implements Bundleable { ...@@ -311,10 +311,10 @@ public class PlaybackException extends Exception implements Bundleable {
return "ERROR_CODE_DRM_DEVICE_REVOKED"; return "ERROR_CODE_DRM_DEVICE_REVOKED";
case ERROR_CODE_DRM_LICENSE_EXPIRED: case ERROR_CODE_DRM_LICENSE_EXPIRED:
return "ERROR_CODE_DRM_LICENSE_EXPIRED"; return "ERROR_CODE_DRM_LICENSE_EXPIRED";
case ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED: case ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED"; return "ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_FRAME_PROCESSING_FAILED: case ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_FRAME_PROCESSING_FAILED"; return "ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED";
default: default:
if (errorCode >= CUSTOM_ERROR_CODE_BASE) { if (errorCode >= CUSTOM_ERROR_CODE_BASE) {
return "custom error code"; return "custom error code";
......
...@@ -21,25 +21,26 @@ import com.google.android.exoplayer2.C; ...@@ -21,25 +21,26 @@ import com.google.android.exoplayer2.C;
* Thrown when an exception occurs while preparing an {@link Effect}, or applying an {@link Effect} * Thrown when an exception occurs while preparing an {@link Effect}, or applying an {@link Effect}
* to video frames. * to video frames.
*/ */
public final class FrameProcessingException extends Exception { public final class VideoFrameProcessingException extends Exception {
/** /**
* Wraps the given exception in a {@code FrameProcessingException} if it is not already a {@code * Wraps the given exception in a {@code VideoFrameProcessingException} if it is not already a
* FrameProcessingException} and returns the exception otherwise. * {@code VideoFrameProcessingException} and returns the exception otherwise.
*/ */
public static FrameProcessingException from(Exception exception) { public static VideoFrameProcessingException from(Exception exception) {
return from(exception, /* presentationTimeUs= */ C.TIME_UNSET); return from(exception, /* presentationTimeUs= */ C.TIME_UNSET);
} }
/** /**
* Wraps the given exception in a {@code FrameProcessingException} with the given timestamp if it * Wraps the given exception in a {@code VideoFrameProcessingException} with the given timestamp
* is not already a {@code FrameProcessingException} and returns the exception otherwise. * if it is not already a {@code VideoFrameProcessingException} and returns the exception
* otherwise.
*/ */
public static FrameProcessingException from(Exception exception, long presentationTimeUs) { public static VideoFrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof FrameProcessingException) { if (exception instanceof VideoFrameProcessingException) {
return (FrameProcessingException) exception; return (VideoFrameProcessingException) exception;
} else { } else {
return new FrameProcessingException(exception, presentationTimeUs); return new VideoFrameProcessingException(exception, presentationTimeUs);
} }
} }
...@@ -54,7 +55,7 @@ public final class FrameProcessingException extends Exception { ...@@ -54,7 +55,7 @@ public final class FrameProcessingException extends Exception {
* *
* @param message The detail message for this exception. * @param message The detail message for this exception.
*/ */
public FrameProcessingException(String message) { public VideoFrameProcessingException(String message) {
this(message, /* presentationTimeUs= */ C.TIME_UNSET); this(message, /* presentationTimeUs= */ C.TIME_UNSET);
} }
...@@ -64,7 +65,7 @@ public final class FrameProcessingException extends Exception { ...@@ -64,7 +65,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception. * @param message The detail message for this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(String message, long presentationTimeUs) { public VideoFrameProcessingException(String message, long presentationTimeUs) {
super(message); super(message);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }
...@@ -75,7 +76,7 @@ public final class FrameProcessingException extends Exception { ...@@ -75,7 +76,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception. * @param message The detail message for this exception.
* @param cause The cause of this exception. * @param cause The cause of this exception.
*/ */
public FrameProcessingException(String message, Throwable cause) { public VideoFrameProcessingException(String message, Throwable cause) {
this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET); this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET);
} }
...@@ -86,7 +87,7 @@ public final class FrameProcessingException extends Exception { ...@@ -86,7 +87,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception. * @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(String message, Throwable cause, long presentationTimeUs) { public VideoFrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
super(message, cause); super(message, cause);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }
...@@ -96,7 +97,7 @@ public final class FrameProcessingException extends Exception { ...@@ -96,7 +97,7 @@ public final class FrameProcessingException extends Exception {
* *
* @param cause The cause of this exception. * @param cause The cause of this exception.
*/ */
public FrameProcessingException(Throwable cause) { public VideoFrameProcessingException(Throwable cause) {
this(cause, /* presentationTimeUs= */ C.TIME_UNSET); this(cause, /* presentationTimeUs= */ C.TIME_UNSET);
} }
...@@ -106,7 +107,7 @@ public final class FrameProcessingException extends Exception { ...@@ -106,7 +107,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception. * @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(Throwable cause, long presentationTimeUs) { public VideoFrameProcessingException(Throwable cause, long presentationTimeUs) {
super(cause); super(cause);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }
......
...@@ -25,7 +25,7 @@ import java.util.List; ...@@ -25,7 +25,7 @@ import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
/** /**
* Interface for a frame processor that applies changes to individual video frames. * Interface for a video frame processor that applies changes to individual video frames.
* *
* <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}. * <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}.
* *
...@@ -36,13 +36,13 @@ import java.util.concurrent.Executor; ...@@ -36,13 +36,13 @@ import java.util.concurrent.Executor;
* <p>The caller must {@linkplain #registerInputFrame() register} input frames before rendering them * <p>The caller must {@linkplain #registerInputFrame() register} input frames before rendering them
* to the input {@link Surface}. * to the input {@link Surface}.
*/ */
public interface FrameProcessor { public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced. // TODO(b/243036513): Allow effects to be replaced.
/** A factory for {@link FrameProcessor} instances. */ /** A factory for {@link VideoFrameProcessor} instances. */
interface Factory { interface Factory {
/** /**
* Creates a new {@link FrameProcessor} instance. * Creates a new {@link VideoFrameProcessor} instance.
* *
* @param context A {@link Context}. * @param context A {@link Context}.
* @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code * @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code
...@@ -54,18 +54,18 @@ public interface FrameProcessor { ...@@ -54,18 +54,18 @@ public interface FrameProcessor {
* video) or not (e.g. from a {@link Bitmap}). See <a * video) or not (e.g. from a {@link Bitmap}). See <a
* href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the * href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the
* SurfaceTexture docs</a> for more information on external textures. * SurfaceTexture docs</a> for more information on external textures.
* @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will render * @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
* output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} * the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* automatically as {@link FrameProcessor} is done processing them. If {@code false}, the * {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* {@link FrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to * VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* render or drop the frame. * render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked. * @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}. * @param listener A {@link Listener}.
* @return A new instance. * @return A new instance.
* @throws FrameProcessingException If a problem occurs while creating the {@link * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
* FrameProcessor}. * VideoFrameProcessor}.
*/ */
FrameProcessor create( VideoFrameProcessor create(
Context context, Context context,
List<Effect> effects, List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
...@@ -75,7 +75,7 @@ public interface FrameProcessor { ...@@ -75,7 +75,7 @@ public interface FrameProcessor {
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor executor, Executor executor,
Listener listener) Listener listener)
throws FrameProcessingException; throws VideoFrameProcessingException;
} }
/** /**
...@@ -105,15 +105,15 @@ public interface FrameProcessor { ...@@ -105,15 +105,15 @@ public interface FrameProcessor {
void onOutputFrameAvailable(long presentationTimeUs); void onOutputFrameAvailable(long presentationTimeUs);
/** /**
* Called when an exception occurs during asynchronous frame processing. * Called when an exception occurs during asynchronous video frame processing.
* *
* <p>If an error occurred, consuming and producing further frames will not work as expected and * <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link FrameProcessor} should be released. * the {@link VideoFrameProcessor} should be released.
*/ */
void onFrameProcessingError(FrameProcessingException exception); void onError(VideoFrameProcessingException exception);
/** Called after the {@link FrameProcessor} has produced its final output frame. */ /** Called after the {@link VideoFrameProcessor} has produced its final output frame. */
void onFrameProcessingEnded(); void onEnded();
} }
/** /**
...@@ -126,14 +126,14 @@ public interface FrameProcessor { ...@@ -126,14 +126,14 @@ public interface FrameProcessor {
long DROP_OUTPUT_FRAME = -2; long DROP_OUTPUT_FRAME = -2;
/** /**
* Provides an input {@link Bitmap} to the {@link FrameProcessor}. * Provides an input {@link Bitmap} to the {@code VideoFrameProcessor}.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code false}. * isInputTextureExternal} parameter is set to {@code false}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
* @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}. * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds. * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second. * second.
...@@ -143,9 +143,10 @@ public interface FrameProcessor { ...@@ -143,9 +143,10 @@ public interface FrameProcessor {
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate); void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/** /**
* Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from. * Returns the input {@link Surface}, where {@code VideoFrameProcessor} consumes input frames
* from.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
...@@ -170,11 +171,11 @@ public interface FrameProcessor { ...@@ -170,11 +171,11 @@ public interface FrameProcessor {
void setInputFrameInfo(FrameInfo inputFrameInfo); void setInputFrameInfo(FrameInfo inputFrameInfo);
/** /**
* Informs the {@code FrameProcessor} that a frame will be queued to its input surface. * Informs the {@code VideoFrameProcessor} that a frame will be queued to its input surface.
* *
* <p>Must be called before rendering a frame to the frame processor's input surface. * <p>Must be called before rendering a frame to the {@code VideoFrameProcessor}'s input surface.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
...@@ -188,7 +189,7 @@ public interface FrameProcessor { ...@@ -188,7 +189,7 @@ public interface FrameProcessor {
* Returns the number of input frames that have been {@linkplain #registerInputFrame() registered} * Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
* but not processed off the {@linkplain #getInputSurface() input surface} yet. * but not processed off the {@linkplain #getInputSurface() input surface} yet.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
...@@ -200,7 +201,7 @@ public interface FrameProcessor { ...@@ -200,7 +201,7 @@ public interface FrameProcessor {
* dropped, they will be rendered to this output {@link SurfaceInfo}. * dropped, they will be rendered to this output {@link SurfaceInfo}.
* *
* <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards. * <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
* If the output {@link SurfaceInfo} is {@code null}, the {@code FrameProcessor} will stop * If the output {@link SurfaceInfo} is {@code null}, the {@code VideoFrameProcessor} will stop
* rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set. * rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set.
* *
* <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain * <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain
...@@ -234,7 +235,7 @@ public interface FrameProcessor { ...@@ -234,7 +235,7 @@ public interface FrameProcessor {
void releaseOutputFrame(long releaseTimeNs); void releaseOutputFrame(long releaseTimeNs);
/** /**
* Informs the {@code FrameProcessor} that no further input frames should be accepted. * Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
...@@ -243,12 +244,12 @@ public interface FrameProcessor { ...@@ -243,12 +244,12 @@ public interface FrameProcessor {
void signalEndOfInput(); void signalEndOfInput();
/** /**
* Flushes the {@code FrameProcessor}. * Flushes the {@code VideoFrameProcessor}.
* *
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this * <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns. * method are no longer considered to be registered when this method returns.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>{@link Listener} methods invoked prior to calling this method should be ignored. * <p>{@link Listener} methods invoked prior to calling this method should be ignored.
...@@ -258,10 +259,9 @@ public interface FrameProcessor { ...@@ -258,10 +259,9 @@ public interface FrameProcessor {
/** /**
* Releases all resources. * Releases all resources.
* *
* <p>If the frame processor is released before it has {@linkplain * <p>If the {@code VideoFrameProcessor} is released before it has {@linkplain Listener#onEnded()
* Listener#onFrameProcessingEnded() ended}, it will attempt to cancel processing any input frames * ended}, it will attempt to cancel processing any input frames that have already become
* that have already become available. Input frames that become available after release are * available. Input frames that become available after release are ignored.
* ignored.
* *
* <p>This method blocks until all resources are released or releasing times out. * <p>This method blocks until all resources are released or releasing times out.
* *
......
...@@ -35,9 +35,9 @@ import android.opengl.EGLDisplay; ...@@ -35,9 +35,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
...@@ -50,7 +50,7 @@ import org.junit.runner.RunWith; ...@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class ContrastPixelTest { public class ContrastPixelTest {
...@@ -89,7 +89,7 @@ public class ContrastPixelTest { ...@@ -89,7 +89,7 @@ public class ContrastPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (contrastShaderProgram != null) { if (contrastShaderProgram != null) {
contrastShaderProgram.release(); contrastShaderProgram.release();
} }
...@@ -198,7 +198,7 @@ public class ContrastPixelTest { ...@@ -198,7 +198,7 @@ public class ContrastPixelTest {
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE); assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
} }
private void setupOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException { private void setupOutputTexture(int outputWidth, int outputHeight) throws Exception {
int outputTexId = int outputTexId =
GlUtil.createTexture( GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false); outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false);
......
...@@ -32,9 +32,9 @@ import android.opengl.EGLDisplay; ...@@ -32,9 +32,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -48,7 +48,7 @@ import org.junit.runner.RunWith; ...@@ -48,7 +48,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class CropPixelTest { public final class CropPixelTest {
...@@ -82,7 +82,7 @@ public final class CropPixelTest { ...@@ -82,7 +82,7 @@ public final class CropPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (cropShaderProgram != null) { if (cropShaderProgram != null) {
cropShaderProgram.release(); cropShaderProgram.release();
} }
......
...@@ -34,9 +34,9 @@ import android.opengl.EGLDisplay; ...@@ -34,9 +34,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -50,7 +50,7 @@ import org.junit.runner.RunWith; ...@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class HslAdjustmentPixelTest { public final class HslAdjustmentPixelTest {
...@@ -100,7 +100,7 @@ public final class HslAdjustmentPixelTest { ...@@ -100,7 +100,7 @@ public final class HslAdjustmentPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (hslProcessor != null) { if (hslProcessor != null) {
hslProcessor.release(); hslProcessor.release();
} }
......
...@@ -32,8 +32,8 @@ import android.opengl.EGLDisplay; ...@@ -32,8 +32,8 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -47,7 +47,7 @@ import org.junit.runner.RunWith; ...@@ -47,7 +47,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class MatrixShaderProgramPixelTest { public final class MatrixShaderProgramPixelTest {
...@@ -87,7 +87,7 @@ public final class MatrixShaderProgramPixelTest { ...@@ -87,7 +87,7 @@ public final class MatrixShaderProgramPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }
......
...@@ -37,9 +37,9 @@ import android.text.SpannableString; ...@@ -37,9 +37,9 @@ import android.text.SpannableString;
import android.text.style.ForegroundColorSpan; import android.text.style.ForegroundColorSpan;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
...@@ -54,7 +54,7 @@ import org.junit.runner.RunWith; ...@@ -54,7 +54,7 @@ import org.junit.runner.RunWith;
* <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical * <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class OverlayShaderProgramPixelTest { public class OverlayShaderProgramPixelTest {
...@@ -101,7 +101,7 @@ public class OverlayShaderProgramPixelTest { ...@@ -101,7 +101,7 @@ public class OverlayShaderProgramPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (overlayShaderProgram != null) { if (overlayShaderProgram != null) {
overlayShaderProgram.release(); overlayShaderProgram.release();
} }
......
...@@ -33,9 +33,9 @@ import android.opengl.EGLSurface; ...@@ -33,9 +33,9 @@ import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith; ...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class PresentationPixelTest { public final class PresentationPixelTest {
...@@ -91,7 +91,7 @@ public final class PresentationPixelTest { ...@@ -91,7 +91,7 @@ public final class PresentationPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (presentationShaderProgram != null) { if (presentationShaderProgram != null) {
presentationShaderProgram.release(); presentationShaderProgram.release();
} }
......
...@@ -35,9 +35,9 @@ import android.opengl.EGLDisplay; ...@@ -35,9 +35,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
...@@ -52,7 +52,7 @@ import org.junit.runner.RunWith; ...@@ -52,7 +52,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class RgbAdjustmentPixelTest { public final class RgbAdjustmentPixelTest {
...@@ -99,7 +99,7 @@ public final class RgbAdjustmentPixelTest { ...@@ -99,7 +99,7 @@ public final class RgbAdjustmentPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }
......
...@@ -33,9 +33,9 @@ import android.opengl.EGLDisplay; ...@@ -33,9 +33,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith; ...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class RgbFilterPixelTest { public final class RgbFilterPixelTest {
...@@ -94,7 +94,7 @@ public final class RgbFilterPixelTest { ...@@ -94,7 +94,7 @@ public final class RgbFilterPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }
......
...@@ -34,9 +34,9 @@ import android.opengl.EGLDisplay; ...@@ -34,9 +34,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith; ...@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class SingleColorLutPixelTest { public class SingleColorLutPixelTest {
...@@ -88,7 +88,7 @@ public class SingleColorLutPixelTest { ...@@ -88,7 +88,7 @@ public class SingleColorLutPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (colorLutShaderProgram != null) { if (colorLutShaderProgram != null) {
colorLutShaderProgram.release(); colorLutShaderProgram.release();
} }
......
...@@ -22,9 +22,9 @@ import android.net.Uri; ...@@ -22,9 +22,9 @@ import android.net.Uri;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import com.google.android.exoplayer2.util.BitmapLoader; import com.google.android.exoplayer2.util.BitmapLoader;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
...@@ -42,9 +42,9 @@ public abstract class BitmapOverlay extends TextureOverlay { ...@@ -42,9 +42,9 @@ public abstract class BitmapOverlay extends TextureOverlay {
* Returns the overlay bitmap displayed at the specified timestamp. * Returns the overlay bitmap displayed at the specified timestamp.
* *
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException; public abstract Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException;
/** /**
* {@inheritDoc} * {@inheritDoc}
...@@ -59,7 +59,7 @@ public abstract class BitmapOverlay extends TextureOverlay { ...@@ -59,7 +59,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
} }
@Override @Override
public int getTextureId(long presentationTimeUs) throws FrameProcessingException { public int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException {
Bitmap bitmap = getBitmap(presentationTimeUs); Bitmap bitmap = getBitmap(presentationTimeUs);
if (bitmap != lastBitmap) { if (bitmap != lastBitmap) {
try { try {
...@@ -77,7 +77,7 @@ public abstract class BitmapOverlay extends TextureOverlay { ...@@ -77,7 +77,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
/* border= */ 0); /* border= */ 0);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
return lastTextureId; return lastTextureId;
...@@ -132,14 +132,14 @@ public abstract class BitmapOverlay extends TextureOverlay { ...@@ -132,14 +132,14 @@ public abstract class BitmapOverlay extends TextureOverlay {
private @MonotonicNonNull Bitmap lastBitmap; private @MonotonicNonNull Bitmap lastBitmap;
@Override @Override
public Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException { public Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException {
if (lastBitmap == null) { if (lastBitmap == null) {
BitmapLoader bitmapLoader = new SimpleBitmapLoader(); BitmapLoader bitmapLoader = new SimpleBitmapLoader();
ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri); ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri);
try { try {
lastBitmap = future.get(); lastBitmap = future.get();
} catch (ExecutionException | InterruptedException e) { } catch (ExecutionException | InterruptedException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
return lastBitmap; return lastBitmap;
......
...@@ -35,7 +35,7 @@ import java.util.Queue; ...@@ -35,7 +35,7 @@ import java.util.Queue;
private final GlShaderProgram producingGlShaderProgram; private final GlShaderProgram producingGlShaderProgram;
private final GlShaderProgram consumingGlShaderProgram; private final GlShaderProgram consumingGlShaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
@GuardedBy("this") @GuardedBy("this")
private final Queue<Pair<TextureInfo, Long>> availableFrames; private final Queue<Pair<TextureInfo, Long>> availableFrames;
...@@ -50,18 +50,18 @@ import java.util.Queue; ...@@ -50,18 +50,18 @@ import java.util.Queue;
* as {@link OutputListener}. * as {@link OutputListener}.
* @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set * @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set
* as {@link InputListener}. * as {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that is
* OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be executed * used for OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be
* by the {@link FrameProcessingTaskExecutor}. The caller is responsible for releasing the * executed by the {@link VideoFrameProcessingTaskExecutor}. The caller is responsible for
* {@link FrameProcessingTaskExecutor}. * releasing the {@link VideoFrameProcessingTaskExecutor}.
*/ */
public ChainingGlShaderProgramListener( public ChainingGlShaderProgramListener(
GlShaderProgram producingGlShaderProgram, GlShaderProgram producingGlShaderProgram,
GlShaderProgram consumingGlShaderProgram, GlShaderProgram consumingGlShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) { VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.producingGlShaderProgram = producingGlShaderProgram; this.producingGlShaderProgram = producingGlShaderProgram;
this.consumingGlShaderProgram = consumingGlShaderProgram; this.consumingGlShaderProgram = consumingGlShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
availableFrames = new ArrayDeque<>(); availableFrames = new ArrayDeque<>();
} }
...@@ -75,9 +75,10 @@ import java.util.Queue; ...@@ -75,9 +75,10 @@ import java.util.Queue;
long presentationTimeUs = pendingFrame.second; long presentationTimeUs = pendingFrame.second;
if (presentationTimeUs == C.TIME_END_OF_SOURCE) { if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream); videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} else { } else {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> () ->
consumingGlShaderProgram.queueInputFrame( consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ pendingFrame.first, presentationTimeUs)); /* inputTexture= */ pendingFrame.first, presentationTimeUs));
...@@ -86,7 +87,7 @@ import java.util.Queue; ...@@ -86,7 +87,7 @@ import java.util.Queue;
@Override @Override
public void onInputFrameProcessed(TextureInfo inputTexture) { public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> producingGlShaderProgram.releaseOutputFrame(inputTexture)); () -> producingGlShaderProgram.releaseOutputFrame(inputTexture));
} }
...@@ -94,14 +95,14 @@ import java.util.Queue; ...@@ -94,14 +95,14 @@ import java.util.Queue;
public synchronized void onFlush() { public synchronized void onFlush() {
consumingGlShaderProgramInputCapacity = 0; consumingGlShaderProgramInputCapacity = 0;
availableFrames.clear(); availableFrames.clear();
frameProcessingTaskExecutor.submit(producingGlShaderProgram::flush); videoFrameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
} }
@Override @Override
public synchronized void onOutputFrameAvailable( public synchronized void onOutputFrameAvailable(
TextureInfo outputTexture, long presentationTimeUs) { TextureInfo outputTexture, long presentationTimeUs) {
if (consumingGlShaderProgramInputCapacity > 0) { if (consumingGlShaderProgramInputCapacity > 0) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> () ->
consumingGlShaderProgram.queueInputFrame( consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ outputTexture, presentationTimeUs)); /* inputTexture= */ outputTexture, presentationTimeUs));
...@@ -116,7 +117,8 @@ import java.util.Queue; ...@@ -116,7 +117,8 @@ import java.util.Queue;
if (!availableFrames.isEmpty()) { if (!availableFrames.isEmpty()) {
availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE)); availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else { } else {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream); videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} }
} }
} }
...@@ -18,8 +18,8 @@ package com.google.android.exoplayer2.effect; ...@@ -18,8 +18,8 @@ package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** /**
* Specifies color transformations using color lookup tables to apply to each frame in the fragment * Specifies color transformations using color lookup tables to apply to each frame in the fragment
...@@ -43,7 +43,7 @@ public interface ColorLut extends GlEffect { ...@@ -43,7 +43,7 @@ public interface ColorLut extends GlEffect {
@Override @Override
@WorkerThread @WorkerThread
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr); return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
} }
} }
...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument; ...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
/** Applies a {@link ColorLut} to each frame in the fragment shader. */ /** Applies a {@link ColorLut} to each frame in the fragment shader. */
...@@ -41,10 +41,10 @@ import java.io.IOException; ...@@ -41,10 +41,10 @@ import java.io.IOException;
* @param colorLut The {@link ColorLut} to apply to each frame in order. * @param colorLut The {@link ColorLut} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr) public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// TODO(b/246315245): Add HDR support. // TODO(b/246315245): Add HDR support.
checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors."); checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors.");
...@@ -53,7 +53,7 @@ import java.io.IOException; ...@@ -53,7 +53,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
...@@ -73,7 +73,8 @@ import java.io.IOException; ...@@ -73,7 +73,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
...@@ -84,18 +85,18 @@ import java.io.IOException; ...@@ -84,18 +85,18 @@ import java.io.IOException;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
colorLut.release(); colorLut.release();
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }
...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect; ...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkArgument; import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** A {@link GlEffect} to control the contrast of video frames. */ /** A {@link GlEffect} to control the contrast of video frames. */
public class Contrast implements GlEffect { public class Contrast implements GlEffect {
...@@ -40,7 +40,7 @@ public class Contrast implements GlEffect { ...@@ -40,7 +40,7 @@ public class Contrast implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new ContrastShaderProgram(context, this, useHdr); return new ContrastShaderProgram(context, this, useHdr);
} }
} }
...@@ -18,10 +18,10 @@ package com.google.android.exoplayer2.effect; ...@@ -18,10 +18,10 @@ package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
/** Applies a {@link Contrast} to each frame in the fragment shader. */ /** Applies a {@link Contrast} to each frame in the fragment shader. */
...@@ -38,10 +38,10 @@ import java.io.IOException; ...@@ -38,10 +38,10 @@ import java.io.IOException;
* @param contrastEffect The {@link Contrast} to apply to each frame in order. * @param contrastEffect The {@link Contrast} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr) public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// Use 1.0001f to avoid division by zero issues. // Use 1.0001f to avoid division by zero issues.
float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast); float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast);
...@@ -49,7 +49,7 @@ import java.io.IOException; ...@@ -49,7 +49,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
...@@ -70,7 +70,8 @@ import java.io.IOException; ...@@ -70,7 +70,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
...@@ -79,17 +80,17 @@ import java.io.IOException; ...@@ -79,17 +80,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }
...@@ -24,9 +24,9 @@ import androidx.annotation.WorkerThread; ...@@ -24,9 +24,9 @@ import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlShaderProgram.InputListener; import com.google.android.exoplayer2.effect.GlShaderProgram.InputListener;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.Queue; import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
...@@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger;
*/ */
/* package */ final class ExternalTextureManager implements InputListener { /* package */ final class ExternalTextureManager implements InputListener {
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final ExternalShaderProgram externalShaderProgram; private final ExternalShaderProgram externalShaderProgram;
private final int externalTexId; private final int externalTexId;
private final Surface surface; private final Surface surface;
...@@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Nullable private volatile FrameInfo currentFrame; @Nullable private volatile FrameInfo currentFrame;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead. // TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile FrameProcessingTask onFlushCompleteTask; @Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs; private long previousStreamOffsetUs;
...@@ -70,21 +70,21 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -70,21 +70,21 @@ import java.util.concurrent.atomic.AtomicInteger;
* *
* @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code * @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code
* ExternalTextureManager} will be set as the {@link InputListener}. * ExternalTextureManager} will be set as the {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor}. * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
* @throws FrameProcessingException If a problem occurs while creating the external texture. * @throws VideoFrameProcessingException If a problem occurs while creating the external texture.
*/ */
// The onFrameAvailableListener will not be invoked until the constructor returns. // The onFrameAvailableListener will not be invoked until the constructor returns.
@SuppressWarnings("nullness:method.invocation.invalid") @SuppressWarnings("nullness:method.invocation.invalid")
public ExternalTextureManager( public ExternalTextureManager(
ExternalShaderProgram externalShaderProgram, ExternalShaderProgram externalShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor)
throws FrameProcessingException { throws VideoFrameProcessingException {
this.externalShaderProgram = externalShaderProgram; this.externalShaderProgram = externalShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
try { try {
externalTexId = GlUtil.createExternalTexture(); externalTexId = GlUtil.createExternalTexture();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
surfaceTexture = new SurfaceTexture(externalTexId); surfaceTexture = new SurfaceTexture(externalTexId);
textureTransformMatrix = new float[16]; textureTransformMatrix = new float[16];
...@@ -93,7 +93,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -93,7 +93,7 @@ import java.util.concurrent.atomic.AtomicInteger;
previousStreamOffsetUs = C.TIME_UNSET; previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener( surfaceTexture.setOnFrameAvailableListener(
unused -> unused ->
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
if (numberOfFramesToDropOnBecomingAvailable > 0) { if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--; numberOfFramesToDropOnBecomingAvailable--;
...@@ -119,7 +119,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -119,7 +119,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public void onReadyToAcceptInputFrame() { public void onReadyToAcceptInputFrame() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
externalShaderProgramInputCapacity.incrementAndGet(); externalShaderProgramInputCapacity.incrementAndGet();
maybeQueueFrameToExternalShaderProgram(); maybeQueueFrameToExternalShaderProgram();
...@@ -128,7 +128,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -128,7 +128,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public void onInputFrameProcessed(TextureInfo inputTexture) { public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
currentFrame = null; currentFrame = null;
maybeQueueFrameToExternalShaderProgram(); maybeQueueFrameToExternalShaderProgram();
...@@ -136,13 +136,13 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -136,13 +136,13 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
/** Sets the task to run on completing flushing, or {@code null} to clear any task. */ /** Sets the task to run on completing flushing, or {@code null} to clear any task. */
public void setOnFlushCompleteListener(@Nullable FrameProcessingTask task) { public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
onFlushCompleteTask = task; onFlushCompleteTask = task;
} }
@Override @Override
public void onFlush() { public void onFlush() {
frameProcessingTaskExecutor.submit(this::flush); videoFrameProcessingTaskExecutor.submit(this::flush);
} }
/** /**
...@@ -169,10 +169,10 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -169,10 +169,10 @@ import java.util.concurrent.atomic.AtomicInteger;
/** /**
* Signals the end of the input. * Signals the end of the input.
* *
* @see FrameProcessor#signalEndOfInput() * @see VideoFrameProcessor#signalEndOfInput()
*/ */
public void signalEndOfInput() { public void signalEndOfInput() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputStreamEnded = true; inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) { if (pendingFrames.isEmpty() && currentFrame == null) {
...@@ -204,7 +204,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -204,7 +204,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) { if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return; return;
} }
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask); videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
} }
@WorkerThread @WorkerThread
......
...@@ -19,13 +19,13 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument; ...@@ -19,13 +19,13 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import androidx.annotation.IntRange; import androidx.annotation.IntRange;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** /**
* Caches the input frames. * Caches the input frames.
* *
* <p>Example usage: cache the processed frames when presenting them on screen, to accommodate for * <p>Example usage: cache the processed frames when presenting them on screen, to accommodate for
* the possible fluctuation in frame processing time between frames. * the possible fluctuation in video frame processing time between frames.
*/ */
public final class FrameCache implements GlEffect { public final class FrameCache implements GlEffect {
/** The capacity of the frame cache. */ /** The capacity of the frame cache. */
...@@ -49,7 +49,7 @@ public final class FrameCache implements GlEffect { ...@@ -49,7 +49,7 @@ public final class FrameCache implements GlEffect {
@Override @Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new FrameCacheShaderProgram(context, capacity, useHdr); return new FrameCacheShaderProgram(context, capacity, useHdr);
} }
} }
...@@ -19,9 +19,9 @@ import static com.google.android.exoplayer2.util.Assertions.checkState; ...@@ -19,9 +19,9 @@ import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException; import java.io.IOException;
...@@ -54,7 +54,7 @@ import java.util.concurrent.Executor; ...@@ -54,7 +54,7 @@ import java.util.concurrent.Executor;
/** Creates a new instance. */ /** Creates a new instance. */
public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr) public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
freeOutputTextures = new ArrayDeque<>(); freeOutputTextures = new ArrayDeque<>();
inUseOutputTextures = new ArrayDeque<>(); inUseOutputTextures = new ArrayDeque<>();
try { try {
...@@ -64,7 +64,7 @@ import java.util.concurrent.Executor; ...@@ -64,7 +64,7 @@ import java.util.concurrent.Executor;
VERTEX_SHADER_TRANSFORMATION_ES2_PATH, VERTEX_SHADER_TRANSFORMATION_ES2_PATH,
FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH); FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw FrameProcessingException.from(e); throw VideoFrameProcessingException.from(e);
} }
this.capacity = capacity; this.capacity = capacity;
this.useHdr = useHdr; this.useHdr = useHdr;
...@@ -80,7 +80,7 @@ import java.util.concurrent.Executor; ...@@ -80,7 +80,7 @@ import java.util.concurrent.Executor;
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = frameProcessingException -> {}; errorListener = videoFrameProcessingException -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
} }
...@@ -129,7 +129,7 @@ import java.util.concurrent.Executor; ...@@ -129,7 +129,7 @@ import java.util.concurrent.Executor;
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs); outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (GlUtil.GlException | NoSuchElementException e) { } catch (GlUtil.GlException | NoSuchElementException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(FrameProcessingException.from(e))); () -> errorListener.onError(VideoFrameProcessingException.from(e)));
} }
} }
...@@ -167,11 +167,11 @@ import java.util.concurrent.Executor; ...@@ -167,11 +167,11 @@ import java.util.concurrent.Executor;
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
try { try {
deleteAllOutputTextures(); deleteAllOutputTextures();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
......
...@@ -17,7 +17,7 @@ package com.google.android.exoplayer2.effect; ...@@ -17,7 +17,7 @@ package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** /**
* Interface for a video frame effect with a {@link GlShaderProgram} implementation. * Interface for a video frame effect with a {@link GlShaderProgram} implementation.
...@@ -34,10 +34,11 @@ public interface GlEffect extends Effect { ...@@ -34,10 +34,11 @@ public interface GlEffect extends Effect {
* @param context A {@link Context}. * @param context A {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If an error occurs while creating the {@link GlShaderProgram}. * @throws VideoFrameProcessingException If an error occurs while creating the {@link
* GlShaderProgram}.
*/ */
GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException; throws VideoFrameProcessingException;
/** /**
* Returns whether a {@link GlEffect} applies no change at every timestamp. * Returns whether a {@link GlEffect} applies no change at every timestamp.
......
...@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.effect; ...@@ -17,8 +17,8 @@ package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import android.opengl.Matrix; import android.opengl.Matrix;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
/** /**
...@@ -52,7 +52,7 @@ public interface GlMatrixTransformation extends GlEffect { ...@@ -52,7 +52,7 @@ public interface GlMatrixTransformation extends GlEffect {
@Override @Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return MatrixShaderProgram.create( return MatrixShaderProgram.create(
context, context,
/* matrixTransformations= */ ImmutableList.of(this), /* matrixTransformations= */ ImmutableList.of(this),
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
*/ */
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
/** /**
...@@ -45,7 +45,7 @@ import java.util.concurrent.Executor; ...@@ -45,7 +45,7 @@ import java.util.concurrent.Executor;
public interface GlShaderProgram { public interface GlShaderProgram {
/** /**
* Listener for input-related frame processing events. * Listener for input-related video frame processing events.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
...@@ -79,7 +79,7 @@ public interface GlShaderProgram { ...@@ -79,7 +79,7 @@ public interface GlShaderProgram {
} }
/** /**
* Listener for output-related frame processing events. * Listener for output-related video frame processing events.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
...@@ -106,26 +106,26 @@ public interface GlShaderProgram { ...@@ -106,26 +106,26 @@ public interface GlShaderProgram {
} }
/** /**
* Listener for frame processing errors. * Listener for video frame processing errors.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
interface ErrorListener { interface ErrorListener {
/** /**
* Called when an exception occurs during asynchronous frame processing. * Called when an exception occurs during asynchronous video frame processing.
* *
* <p>If an error occurred, consuming and producing further frames will not work as expected and * <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link GlShaderProgram} should be released. * the {@link GlShaderProgram} should be released.
*/ */
void onFrameProcessingError(FrameProcessingException e); void onError(VideoFrameProcessingException e);
} }
/** /**
* Sets the {@link InputListener}. * Sets the {@link InputListener}.
* *
* <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL * <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link InputListener} methods * context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link InputListener}
* on its internal thread. * methods on its internal thread.
*/ */
void setInputListener(InputListener inputListener); void setInputListener(InputListener inputListener);
...@@ -133,7 +133,7 @@ public interface GlShaderProgram { ...@@ -133,7 +133,7 @@ public interface GlShaderProgram {
* Sets the {@link OutputListener}. * Sets the {@link OutputListener}.
* *
* <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL * <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link OutputListener} * context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link OutputListener}
* methods on its internal thread. * methods on its internal thread.
*/ */
void setOutputListener(OutputListener outputListener); void setOutputListener(OutputListener outputListener);
...@@ -188,7 +188,7 @@ public interface GlShaderProgram { ...@@ -188,7 +188,7 @@ public interface GlShaderProgram {
/** /**
* Releases all resources. * Releases all resources.
* *
* @throws FrameProcessingException If an error occurs while releasing resources. * @throws VideoFrameProcessingException If an error occurs while releasing resources.
*/ */
void release() throws FrameProcessingException; void release() throws VideoFrameProcessingException;
} }
...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect; ...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkArgument; import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CanIgnoreReturnValue;
/** Adjusts the HSL (Hue, Saturation, and Lightness) of a frame. */ /** Adjusts the HSL (Hue, Saturation, and Lightness) of a frame. */
...@@ -112,7 +112,7 @@ public class HslAdjustment implements GlEffect { ...@@ -112,7 +112,7 @@ public class HslAdjustment implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr); return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr);
} }
} }
...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument; ...@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException; import java.io.IOException;
/** Applies the {@link HslAdjustment} to each frame in the fragment shader. */ /** Applies the {@link HslAdjustment} to each frame in the fragment shader. */
...@@ -40,10 +40,10 @@ import java.io.IOException; ...@@ -40,10 +40,10 @@ import java.io.IOException;
* @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order. * @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr) public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// TODO(b/241241680): Check if HDR <-> HSL works the same or not. // TODO(b/241241680): Check if HDR <-> HSL works the same or not.
checkArgument(!useHdr, "HDR is not yet supported."); checkArgument(!useHdr, "HDR is not yet supported.");
...@@ -51,7 +51,7 @@ import java.io.IOException; ...@@ -51,7 +51,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
...@@ -78,7 +78,8 @@ import java.io.IOException; ...@@ -78,7 +78,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
...@@ -87,7 +88,7 @@ import java.io.IOException; ...@@ -87,7 +88,7 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
} }
...@@ -23,20 +23,21 @@ import android.opengl.GLES20; ...@@ -23,20 +23,21 @@ import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.Queue; import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;
/** /**
* Forwards a frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for consumption. * Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for
* consumption.
* *
* <p>Methods in this class can be called from any thread. * <p>Methods in this class can be called from any thread.
*/ */
/* package */ final class InternalTextureManager implements GlShaderProgram.InputListener { /* package */ final class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram; private final GlShaderProgram shaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream. // The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps; private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
...@@ -51,13 +52,14 @@ import java.util.concurrent.LinkedBlockingQueue; ...@@ -51,13 +52,14 @@ import java.util.concurrent.LinkedBlockingQueue;
* *
* @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager} * @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager}
* will be set as the {@link GlShaderProgram.InputListener}. * will be set as the {@link GlShaderProgram.InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that the methods of * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that the
* this class run on. * methods of this class run on.
*/ */
public InternalTextureManager( public InternalTextureManager(
GlShaderProgram shaderProgram, FrameProcessingTaskExecutor frameProcessingTaskExecutor) { GlShaderProgram shaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.shaderProgram = shaderProgram; this.shaderProgram = shaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
pendingBitmaps = new LinkedBlockingQueue<>(); pendingBitmaps = new LinkedBlockingQueue<>();
} }
...@@ -67,7 +69,7 @@ import java.util.concurrent.LinkedBlockingQueue; ...@@ -67,7 +69,7 @@ import java.util.concurrent.LinkedBlockingQueue;
// program and change to only allocate one texId at a time. A change to the // program and change to only allocate one texId at a time. A change to the
// onInputFrameProcessed() method signature to include presentationTimeUs will probably be // onInputFrameProcessed() method signature to include presentationTimeUs will probably be
// needed to do this. // needed to do this.
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
downstreamShaderProgramCapacity++; downstreamShaderProgramCapacity++;
maybeQueueToShaderProgram(); maybeQueueToShaderProgram();
...@@ -77,21 +79,21 @@ import java.util.concurrent.LinkedBlockingQueue; ...@@ -77,21 +79,21 @@ import java.util.concurrent.LinkedBlockingQueue;
/** /**
* Provides an input {@link Bitmap} to put into the video frames. * Provides an input {@link Bitmap} to put into the video frames.
* *
* @see FrameProcessor#queueInputBitmap * @see VideoFrameProcessor#queueInputBitmap
*/ */
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) { Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr)); () -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr));
} }
/** /**
* Signals the end of the input. * Signals the end of the input.
* *
* @see FrameProcessor#signalEndOfInput() * @see VideoFrameProcessor#signalEndOfInput()
*/ */
public void signalEndOfInput() { public void signalEndOfInput() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputEnded = true; inputEnded = true;
maybeSignalEndOfOutput(); maybeSignalEndOfOutput();
...@@ -100,7 +102,7 @@ import java.util.concurrent.LinkedBlockingQueue; ...@@ -100,7 +102,7 @@ import java.util.concurrent.LinkedBlockingQueue;
@WorkerThread @WorkerThread
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr) private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
if (inputEnded) { if (inputEnded) {
return; return;
...@@ -114,7 +116,7 @@ import java.util.concurrent.LinkedBlockingQueue; ...@@ -114,7 +116,7 @@ import java.util.concurrent.LinkedBlockingQueue;
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw FrameProcessingException.from(e); throw VideoFrameProcessingException.from(e);
} }
TextureInfo textureInfo = TextureInfo textureInfo =
new TextureInfo( new TextureInfo(
......
...@@ -23,10 +23,10 @@ import android.opengl.GLES20; ...@@ -23,10 +23,10 @@ import android.opengl.GLES20;
import android.opengl.Matrix; import android.opengl.Matrix;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.io.IOException; import java.io.IOException;
...@@ -141,15 +141,15 @@ import java.util.List; ...@@ -141,15 +141,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations. * empty to apply no color transformations.
* @param useHdr Whether input and output colors are HDR. * @param useHdr Whether input and output colors are HDR.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram create( public static MatrixShaderProgram create(
Context context, Context context,
List<GlMatrixTransformation> matrixTransformations, List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
boolean useHdr) boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
GlProgram glProgram = GlProgram glProgram =
createGlProgram( createGlProgram(
context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH); context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH);
...@@ -183,8 +183,8 @@ import java.util.List; ...@@ -183,8 +183,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}. * @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain * If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. * ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createWithInternalSampler( public static MatrixShaderProgram createWithInternalSampler(
Context context, Context context,
...@@ -192,7 +192,7 @@ import java.util.List; ...@@ -192,7 +192,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkState( checkState(
!ColorInfo.isTransferHdr(inputColorInfo), !ColorInfo.isTransferHdr(inputColorInfo),
"MatrixShaderProgram doesn't support HDR internal sampler input yet."); "MatrixShaderProgram doesn't support HDR internal sampler input yet.");
...@@ -227,8 +227,8 @@ import java.util.List; ...@@ -227,8 +227,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}. * @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain * If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. * ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createWithExternalSampler( public static MatrixShaderProgram createWithExternalSampler(
Context context, Context context,
...@@ -236,7 +236,7 @@ import java.util.List; ...@@ -236,7 +236,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo); boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
String vertexShaderFilePath = String vertexShaderFilePath =
isInputTransferHdr isInputTransferHdr
...@@ -270,15 +270,15 @@ import java.util.List; ...@@ -270,15 +270,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations. * empty to apply no color transformations.
* @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors. * @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createApplyingOetf( public static MatrixShaderProgram createApplyingOetf(
Context context, Context context,
List<GlMatrixTransformation> matrixTransformations, List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo); boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo);
String vertexShaderFilePath = String vertexShaderFilePath =
outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH; outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH;
...@@ -315,7 +315,7 @@ import java.util.List; ...@@ -315,7 +315,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo); boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
@C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer; @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
if (isInputTransferHdr) { if (isInputTransferHdr) {
...@@ -323,7 +323,7 @@ import java.util.List; ...@@ -323,7 +323,7 @@ import java.util.List;
// In HDR editing mode the decoder output is sampled in YUV. // In HDR editing mode the decoder output is sampled in YUV.
if (!GlUtil.isYuvTargetExtensionSupported()) { if (!GlUtil.isYuvTargetExtensionSupported()) {
throw new FrameProcessingException( throw new VideoFrameProcessingException(
"The EXT_YUV_target extension is required for HDR editing input."); "The EXT_YUV_target extension is required for HDR editing input.");
} }
glProgram.setFloatsUniform( glProgram.setFloatsUniform(
...@@ -396,13 +396,13 @@ import java.util.List; ...@@ -396,13 +396,13 @@ import java.util.List;
private static GlProgram createGlProgram( private static GlProgram createGlProgram(
Context context, String vertexShaderFilePath, String fragmentShaderFilePath) Context context, String vertexShaderFilePath, String fragmentShaderFilePath)
throws FrameProcessingException { throws VideoFrameProcessingException {
GlProgram glProgram; GlProgram glProgram;
try { try {
glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); float[] identityMatrix = GlUtil.create4x4IdentityMatrix();
...@@ -421,7 +421,8 @@ import java.util.List; ...@@ -421,7 +421,8 @@ import java.util.List;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
updateCompositeRgbaMatrixArray(presentationTimeUs); updateCompositeRgbaMatrixArray(presentationTimeUs);
updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs); updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
if (visiblePolygon.size() < 3) { if (visiblePolygon.size() < 3) {
...@@ -442,17 +443,17 @@ import java.util.List; ...@@ -442,17 +443,17 @@ import java.util.List;
GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size()); GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size());
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
/** /**
...@@ -38,7 +38,7 @@ public final class OverlayEffect implements GlEffect { ...@@ -38,7 +38,7 @@ public final class OverlayEffect implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new OverlayShaderProgram(context, useHdr, overlays); return new OverlayShaderProgram(context, useHdr, overlays);
} }
} }
...@@ -21,11 +21,11 @@ import android.content.Context; ...@@ -21,11 +21,11 @@ import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.Matrix; import android.opengl.Matrix;
import android.util.Pair; import android.util.Pair;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram; import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
/** Applies zero or more {@link TextureOverlay}s onto each frame. */ /** Applies zero or more {@link TextureOverlay}s onto each frame. */
...@@ -49,11 +49,11 @@ import com.google.common.collect.ImmutableList; ...@@ -49,11 +49,11 @@ import com.google.common.collect.ImmutableList;
* @param context The {@link Context}. * @param context The {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public OverlayShaderProgram( public OverlayShaderProgram(
Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays) Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet."); checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet.");
// The maximum number of samplers allowed in a single GL program is 16. // The maximum number of samplers allowed in a single GL program is 16.
...@@ -70,7 +70,7 @@ import com.google.common.collect.ImmutableList; ...@@ -70,7 +70,7 @@ import com.google.common.collect.ImmutableList;
glProgram = glProgram =
new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size())); new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size()));
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
glProgram.setBufferAttribute( glProgram.setBufferAttribute(
...@@ -91,7 +91,8 @@ import com.google.common.collect.ImmutableList; ...@@ -91,7 +91,8 @@ import com.google.common.collect.ImmutableList;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
if (!overlays.isEmpty()) { if (!overlays.isEmpty()) {
...@@ -155,17 +156,17 @@ import com.google.common.collect.ImmutableList; ...@@ -155,17 +156,17 @@ import com.google.common.collect.ImmutableList;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
......
...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect; ...@@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Provides common color filters. */ /** Provides common color filters. */
...@@ -90,7 +90,7 @@ public class RgbFilter implements RgbMatrix { ...@@ -90,7 +90,7 @@ public class RgbFilter implements RgbMatrix {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkForConsistentHdrSetting(useHdr); checkForConsistentHdrSetting(useHdr);
return RgbMatrix.super.toGlShaderProgram(context, useHdr); return RgbMatrix.super.toGlShaderProgram(context, useHdr);
} }
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import android.content.Context; import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
/** /**
...@@ -39,7 +39,7 @@ public interface RgbMatrix extends GlEffect { ...@@ -39,7 +39,7 @@ public interface RgbMatrix extends GlEffect {
@Override @Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return MatrixShaderProgram.create( return MatrixShaderProgram.create(
context, context,
/* matrixTransformations= */ ImmutableList.of(), /* matrixTransformations= */ ImmutableList.of(),
......
...@@ -24,9 +24,9 @@ import android.graphics.Bitmap; ...@@ -24,9 +24,9 @@ import android.graphics.Bitmap;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** Transforms the colors of a frame by applying the same color lookup table to each frame. */ /** Transforms the colors of a frame by applying the same color lookup table to each frame. */
public class SingleColorLut implements ColorLut { public class SingleColorLut implements ColorLut {
...@@ -148,13 +148,13 @@ public class SingleColorLut implements ColorLut { ...@@ -148,13 +148,13 @@ public class SingleColorLut implements ColorLut {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkState(!useHdr, "HDR is currently not supported."); checkState(!useHdr, "HDR is currently not supported.");
try { try {
lutTextureId = storeLutAsTexture(lut); lutTextureId = storeLutAsTexture(lut);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException("Could not store the LUT as a texture.", e); throw new VideoFrameProcessingException("Could not store the LUT as a texture.", e);
} }
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr); return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
......
...@@ -18,9 +18,9 @@ package com.google.android.exoplayer2.effect; ...@@ -18,9 +18,9 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkState;
import androidx.annotation.CallSuper; import androidx.annotation.CallSuper;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull; import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
...@@ -59,7 +59,7 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram { ...@@ -59,7 +59,7 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
this.useHdr = useHdr; this.useHdr = useHdr;
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {}; errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
} }
...@@ -72,9 +72,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram { ...@@ -72,9 +72,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* @param inputWidth The input width, in pixels. * @param inputWidth The input width, in pixels.
* @param inputHeight The input height, in pixels. * @param inputHeight The input height, in pixels.
* @return The output width and height of frames processed through {@link #drawFrame(int, long)}. * @return The output width and height of frames processed through {@link #drawFrame(int, long)}.
* @throws FrameProcessingException If an error occurs while configuring. * @throws VideoFrameProcessingException If an error occurs while configuring.
*/ */
public abstract Size configure(int inputWidth, int inputHeight) throws FrameProcessingException; public abstract Size configure(int inputWidth, int inputHeight)
throws VideoFrameProcessingException;
/** /**
* Draws one frame. * Draws one frame.
...@@ -88,10 +89,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram { ...@@ -88,10 +89,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* *
* @param inputTexId Identifier of a 2D OpenGL texture containing the input frame. * @param inputTexId Identifier of a 2D OpenGL texture containing the input frame.
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract void drawFrame(int inputTexId, long presentationTimeUs) public abstract void drawFrame(int inputTexId, long presentationTimeUs)
throws FrameProcessingException; throws VideoFrameProcessingException;
@Override @Override
public final void setInputListener(InputListener inputListener) { public final void setInputListener(InputListener inputListener) {
...@@ -132,19 +133,19 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram { ...@@ -132,19 +133,19 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
drawFrame(inputTexture.texId, presentationTimeUs); drawFrame(inputTexture.texId, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs); outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) { } catch (VideoFrameProcessingException | GlUtil.GlException | RuntimeException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () ->
errorListener.onFrameProcessingError( errorListener.onError(
e instanceof FrameProcessingException e instanceof VideoFrameProcessingException
? (FrameProcessingException) e ? (VideoFrameProcessingException) e
: new FrameProcessingException(e))); : new VideoFrameProcessingException(e)));
} }
} }
@EnsuresNonNull("outputTexture") @EnsuresNonNull("outputTexture")
private void configureOutputTexture(int inputWidth, int inputHeight) private void configureOutputTexture(int inputWidth, int inputHeight)
throws GlUtil.GlException, FrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
this.inputWidth = inputWidth; this.inputWidth = inputWidth;
this.inputHeight = inputHeight; this.inputHeight = inputHeight;
Size outputSize = configure(inputWidth, inputHeight); Size outputSize = configure(inputWidth, inputHeight);
...@@ -182,12 +183,12 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram { ...@@ -182,12 +183,12 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
@Override @Override
@CallSuper @CallSuper
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
if (outputTexture != null) { if (outputTexture != null) {
try { try {
GlUtil.deleteTexture(outputTexture.texId); GlUtil.deleteTexture(outputTexture.texId);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }
......
...@@ -15,8 +15,8 @@ ...@@ -15,8 +15,8 @@
*/ */
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.Size; import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** Creates overlays from OpenGL textures. */ /** Creates overlays from OpenGL textures. */
public abstract class TextureOverlay { public abstract class TextureOverlay {
...@@ -24,9 +24,9 @@ public abstract class TextureOverlay { ...@@ -24,9 +24,9 @@ public abstract class TextureOverlay {
* Returns the overlay texture identifier displayed at the specified timestamp. * Returns the overlay texture identifier displayed at the specified timestamp.
* *
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract int getTextureId(long presentationTimeUs) throws FrameProcessingException; public abstract int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException;
// This method is required to find the size of a texture given a texture identifier using OpenGL // This method is required to find the size of a texture given a texture identifier using OpenGL
// ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv(). // ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv().
......
...@@ -15,14 +15,14 @@ ...@@ -15,14 +15,14 @@
*/ */
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** /**
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link * Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* FrameProcessingException}. * VideoFrameProcessingException}.
*/ */
/* package */ interface FrameProcessingTask { /* package */ interface VideoFrameProcessingTask {
/** Runs the task. */ /** Runs the task. */
void run() throws FrameProcessingException, GlUtil.GlException; void run() throws VideoFrameProcessingException, GlUtil.GlException;
} }
...@@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; ...@@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.FrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.ArrayDeque; import java.util.ArrayDeque;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
...@@ -29,36 +29,36 @@ import java.util.concurrent.Future; ...@@ -29,36 +29,36 @@ import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.RejectedExecutionException;
/** /**
* Wrapper around a single thread {@link ExecutorService} for executing {@link FrameProcessingTask} * Wrapper around a single thread {@link ExecutorService} for executing {@link
* instances. * VideoFrameProcessingTask} instances.
* *
* <p>Public methods can be called from any thread. * <p>Public methods can be called from any thread.
* *
* <p>The wrapper handles calling {@link * <p>The wrapper handles calling {@link
* FrameProcessor.Listener#onFrameProcessingError(FrameProcessingException)} for errors that occur * VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
* during these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed * these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
* to be non-recoverable, so the {@code FrameProcessingTaskExecutor} should be released if an error * non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
* occurs. * occurs.
* *
* <p>{@linkplain #submitWithHighPriority(FrameProcessingTask) High priority tasks} are always * <p>{@linkplain #submitWithHighPriority(VideoFrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(FrameProcessingTask) default priority tasks}. Tasks with * executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with
* equal priority are executed in FIFO order. * equal priority are executed in FIFO order.
*/ */
/* package */ final class FrameProcessingTaskExecutor { /* package */ final class VideoFrameProcessingTaskExecutor {
private final ExecutorService singleThreadExecutorService; private final ExecutorService singleThreadExecutorService;
private final FrameProcessor.Listener listener; private final VideoFrameProcessor.Listener listener;
private final Object lock; private final Object lock;
@GuardedBy("lock") @GuardedBy("lock")
private final ArrayDeque<FrameProcessingTask> highPriorityTasks; private final ArrayDeque<VideoFrameProcessingTask> highPriorityTasks;
@GuardedBy("lock") @GuardedBy("lock")
private boolean shouldCancelTasks; private boolean shouldCancelTasks;
/** Creates a new instance. */ /** Creates a new instance. */
public FrameProcessingTaskExecutor( public VideoFrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, FrameProcessor.Listener listener) { ExecutorService singleThreadExecutorService, VideoFrameProcessor.Listener listener) {
this.singleThreadExecutorService = singleThreadExecutorService; this.singleThreadExecutorService = singleThreadExecutorService;
this.listener = listener; this.listener = listener;
lock = new Object(); lock = new Object();
...@@ -66,11 +66,11 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -66,11 +66,11 @@ import java.util.concurrent.RejectedExecutionException;
} }
/** /**
* Submits the given {@link FrameProcessingTask} to be executed after all pending tasks have * Submits the given {@link VideoFrameProcessingTask} to be executed after all pending tasks have
* completed. * completed.
*/ */
@SuppressWarnings("FutureReturnValueIgnored") @SuppressWarnings("FutureReturnValueIgnored")
public void submit(FrameProcessingTask task) { public void submit(VideoFrameProcessingTask task) {
@Nullable RejectedExecutionException executionException = null; @Nullable RejectedExecutionException executionException = null;
synchronized (lock) { synchronized (lock) {
if (shouldCancelTasks) { if (shouldCancelTasks) {
...@@ -89,13 +89,13 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -89,13 +89,13 @@ import java.util.concurrent.RejectedExecutionException;
} }
/** /**
* Submits the given {@link FrameProcessingTask} to be executed after the currently running task * Submits the given {@link VideoFrameProcessingTask} to be executed after the currently running
* and all previously submitted high-priority tasks have completed. * task and all previously submitted high-priority tasks have completed.
* *
* <p>Tasks that were previously {@linkplain #submit(FrameProcessingTask) submitted} without * <p>Tasks that were previously {@linkplain #submit(VideoFrameProcessingTask) submitted} without
* high-priority and have not started executing will be executed after this task is complete. * high-priority and have not started executing will be executed after this task is complete.
*/ */
public void submitWithHighPriority(FrameProcessingTask task) { public void submitWithHighPriority(VideoFrameProcessingTask task) {
synchronized (lock) { synchronized (lock) {
if (shouldCancelTasks) { if (shouldCancelTasks) {
return; return;
...@@ -111,7 +111,7 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -111,7 +111,7 @@ import java.util.concurrent.RejectedExecutionException;
/** /**
* Flushes all scheduled tasks. * Flushes all scheduled tasks.
* *
* <p>During flush, the {@code FrameProcessingTaskExecutor} ignores the {@linkplain #submit * <p>During flush, the {@code VideoFrameProcessingTaskExecutor} ignores the {@linkplain #submit
* submission of new tasks}. The tasks that are submitted before flushing are either executed or * submission of new tasks}. The tasks that are submitted before flushing are either executed or
* canceled when this method returns. * canceled when this method returns.
*/ */
...@@ -137,12 +137,12 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -137,12 +137,12 @@ import java.util.concurrent.RejectedExecutionException;
/** /**
* Cancels remaining tasks, runs the given release task, and shuts down the background thread. * Cancels remaining tasks, runs the given release task, and shuts down the background thread.
* *
* @param releaseTask A {@link FrameProcessingTask} to execute before shutting down the background * @param releaseTask A {@link VideoFrameProcessingTask} to execute before shutting down the
* thread. * background thread.
* @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds. * @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds.
* @throws InterruptedException If interrupted while releasing resources. * @throws InterruptedException If interrupted while releasing resources.
*/ */
public void release(FrameProcessingTask releaseTask, long releaseWaitTimeMs) public void release(VideoFrameProcessingTask releaseTask, long releaseWaitTimeMs)
throws InterruptedException { throws InterruptedException {
synchronized (lock) { synchronized (lock) {
shouldCancelTasks = true; shouldCancelTasks = true;
...@@ -153,16 +153,16 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -153,16 +153,16 @@ import java.util.concurrent.RejectedExecutionException;
singleThreadExecutorService.shutdown(); singleThreadExecutorService.shutdown();
try { try {
if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) { if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) {
listener.onFrameProcessingError(new FrameProcessingException("Release timed out")); listener.onError(new VideoFrameProcessingException("Release timed out"));
} }
releaseFuture.get(); releaseFuture.get();
} catch (ExecutionException e) { } catch (ExecutionException e) {
listener.onFrameProcessingError(new FrameProcessingException(e)); listener.onError(new VideoFrameProcessingException(e));
} }
} }
private Future<?> wrapTaskAndSubmitToExecutorService( private Future<?> wrapTaskAndSubmitToExecutorService(
FrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) { VideoFrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
return singleThreadExecutorService.submit( return singleThreadExecutorService.submit(
() -> { () -> {
try { try {
...@@ -172,7 +172,7 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -172,7 +172,7 @@ import java.util.concurrent.RejectedExecutionException;
} }
} }
@Nullable FrameProcessingTask nextHighPriorityTask; @Nullable VideoFrameProcessingTask nextHighPriorityTask;
while (true) { while (true) {
synchronized (lock) { synchronized (lock) {
// Lock only polling to prevent blocking the public method calls. // Lock only polling to prevent blocking the public method calls.
...@@ -199,6 +199,6 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -199,6 +199,6 @@ import java.util.concurrent.RejectedExecutionException;
} }
shouldCancelTasks = true; shouldCancelTasks = true;
} }
listener.onFrameProcessingError(FrameProcessingException.from(exception)); listener.onError(VideoFrameProcessingException.from(exception));
} }
} }
...@@ -19,8 +19,8 @@ import static org.mockito.Mockito.mock; ...@@ -19,8 +19,8 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import org.junit.After; import org.junit.After;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
...@@ -30,20 +30,22 @@ import org.junit.runner.RunWith; ...@@ -30,20 +30,22 @@ import org.junit.runner.RunWith;
public final class ChainingGlShaderProgramListenerTest { public final class ChainingGlShaderProgramListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100; private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final FrameProcessor.Listener mockFrameProcessorListener = private final VideoFrameProcessor.Listener mockFrameProcessorListener =
mock(FrameProcessor.Listener.class); mock(VideoFrameProcessor.Listener.class);
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor = private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new FrameProcessingTaskExecutor( new VideoFrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener); Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener);
private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class); private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class); private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);
private final ChainingGlShaderProgramListener chainingGlShaderProgramListener = private final ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener( new ChainingGlShaderProgramListener(
mockProducingGlShaderProgram, mockConsumingGlShaderProgram, frameProcessingTaskExecutor); mockProducingGlShaderProgram,
mockConsumingGlShaderProgram,
videoFrameProcessingTaskExecutor);
@After @After
public void release() throws InterruptedException { public void release() throws InterruptedException {
frameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS); videoFrameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
} }
@Test @Test
......
...@@ -28,9 +28,9 @@ import android.graphics.Bitmap; ...@@ -28,9 +28,9 @@ import android.graphics.Bitmap;
import android.util.Log; import android.util.Log;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.testutil.DecodeOneFrameUtil; import com.google.android.exoplayer2.testutil.DecodeOneFrameUtil;
import com.google.android.exoplayer2.testutil.FrameProcessorTestRunner; import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
...@@ -40,10 +40,10 @@ import org.junit.Test; ...@@ -40,10 +40,10 @@ import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** /**
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link GlEffectsFrameProcessor}. * Instrumentation pixel-test for HDR to SDR tone-mapping via {@link DefaultVideoFrameProcessor}.
* *
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output * <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* matches expected output, either from a golden file or from another edit. * output matches expected output, either from a golden file or from another edit.
*/ */
// TODO(b/263395272): Move this test to effects/mh tests. // TODO(b/263395272): Move this test to effects/mh tests.
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
...@@ -75,12 +75,12 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -75,12 +75,12 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
"OpenGL-based HDR to SDR tone mapping is unsupported below API 29."; "OpenGL-based HDR to SDR tone mapping is unsupported below API 29.";
private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support."; private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support.";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner; private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After @After
public void release() { public void release() {
if (frameProcessorTestRunner != null) { if (videoFrameProcessorTestRunner != null) {
frameProcessorTestRunner.release(); videoFrameProcessorTestRunner.release();
} }
} }
...@@ -114,7 +114,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -114,7 +114,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED) .setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2) .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build(); .build();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING) .setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING)
.setInputColorInfo(hlgColor) .setInputColorInfo(hlgColor)
...@@ -124,7 +124,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -124,7 +124,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap; Bitmap actualBitmap;
try { try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) { } catch (UnsupportedOperationException e) {
if (e.getMessage() != null if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) { && e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
...@@ -177,7 +177,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -177,7 +177,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED) .setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2) .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build(); .build();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING) .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
.setInputColorInfo(pqColor) .setInputColorInfo(pqColor)
...@@ -187,7 +187,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -187,7 +187,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap; Bitmap actualBitmap;
try { try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) { } catch (UnsupportedOperationException e) {
if (e.getMessage() != null if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) { && e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
...@@ -209,10 +209,10 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest { ...@@ -209,10 +209,10 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE); .isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
} }
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
return new FrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory()); .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory());
} }
} }
...@@ -17,9 +17,9 @@ package com.google.android.exoplayer2.transformer; ...@@ -17,9 +17,9 @@ package com.google.android.exoplayer2.transformer;
import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
...@@ -43,19 +43,19 @@ public final class Effects { ...@@ -43,19 +43,19 @@ public final class Effects {
*/ */
public final ImmutableList<Effect> videoEffects; public final ImmutableList<Effect> videoEffects;
/** /**
* The {@link FrameProcessor.Factory} for the {@link FrameProcessor} to use when applying the * The {@link VideoFrameProcessor.Factory} for the {@link VideoFrameProcessor} to use when
* {@code videoEffects} to the video frames. * applying the {@code videoEffects} to the video frames.
*/ */
public final FrameProcessor.Factory frameProcessorFactory; public final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/** /**
* Creates an instance using a {@link GlEffectsFrameProcessor.Factory}. * Creates an instance using a {@link DefaultVideoFrameProcessor.Factory}.
* *
* <p>This is equivalent to calling {@link Effects#Effects(List, List, FrameProcessor.Factory)} * <p>This is equivalent to calling {@link Effects#Effects(List, List,
* with a {@link GlEffectsFrameProcessor.Factory}. * VideoFrameProcessor.Factory)} with a {@link DefaultVideoFrameProcessor.Factory}.
*/ */
public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) { public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) {
this(audioProcessors, videoEffects, new GlEffectsFrameProcessor.Factory()); this(audioProcessors, videoEffects, new DefaultVideoFrameProcessor.Factory());
} }
/** /**
...@@ -63,14 +63,14 @@ public final class Effects { ...@@ -63,14 +63,14 @@ public final class Effects {
* *
* @param audioProcessors The {@link #audioProcessors}. * @param audioProcessors The {@link #audioProcessors}.
* @param videoEffects The {@link #videoEffects}. * @param videoEffects The {@link #videoEffects}.
* @param frameProcessorFactory The {@link #frameProcessorFactory}. * @param videoFrameProcessorFactory The {@link #videoFrameProcessorFactory}.
*/ */
public Effects( public Effects(
List<AudioProcessor> audioProcessors, List<AudioProcessor> audioProcessors,
List<Effect> videoEffects, List<Effect> videoEffects,
FrameProcessor.Factory frameProcessorFactory) { VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.audioProcessors = ImmutableList.copyOf(audioProcessors); this.audioProcessors = ImmutableList.copyOf(audioProcessors);
this.videoEffects = ImmutableList.copyOf(videoEffects); this.videoEffects = ImmutableList.copyOf(videoEffects);
this.frameProcessorFactory = frameProcessorFactory; this.videoFrameProcessorFactory = videoFrameProcessorFactory;
} }
} }
...@@ -23,9 +23,9 @@ import androidx.annotation.Nullable; ...@@ -23,9 +23,9 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat; import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableBiMap;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
...@@ -64,7 +64,7 @@ public final class TransformationException extends Exception { ...@@ -64,7 +64,7 @@ public final class TransformationException extends Exception {
ERROR_CODE_ENCODER_INIT_FAILED, ERROR_CODE_ENCODER_INIT_FAILED,
ERROR_CODE_ENCODING_FAILED, ERROR_CODE_ENCODING_FAILED,
ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED, ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED,
ERROR_CODE_FRAME_PROCESSING_FAILED, ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED,
ERROR_CODE_AUDIO_PROCESSING_FAILED, ERROR_CODE_AUDIO_PROCESSING_FAILED,
ERROR_CODE_MUXING_FAILED, ERROR_CODE_MUXING_FAILED,
}) })
...@@ -149,8 +149,8 @@ public final class TransformationException extends Exception { ...@@ -149,8 +149,8 @@ public final class TransformationException extends Exception {
// Video editing errors (5xxx). // Video editing errors (5xxx).
/** Caused by a frame processing failure. */ /** Caused by a video frame processing failure. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 5001; public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 5001;
// Audio processing errors (6xxx). // Audio processing errors (6xxx).
...@@ -180,7 +180,7 @@ public final class TransformationException extends Exception { ...@@ -180,7 +180,7 @@ public final class TransformationException extends Exception {
.put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED) .put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED)
.put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED) .put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED)
.put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED) .put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED)
.put("ERROR_CODE_FRAME_PROCESSING_FAILED", ERROR_CODE_FRAME_PROCESSING_FAILED) .put("ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED", ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED) .put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED)
.put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED) .put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED)
.buildOrThrow(); .buildOrThrow();
...@@ -269,15 +269,15 @@ public final class TransformationException extends Exception { ...@@ -269,15 +269,15 @@ public final class TransformationException extends Exception {
} }
/** /**
* Creates an instance for a {@link FrameProcessor} related exception. * Creates an instance for a {@link VideoFrameProcessor} related exception.
* *
* @param cause The cause of the failure. * @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}. * @param errorCode See {@link #errorCode}.
* @return The created instance. * @return The created instance.
*/ */
/* package */ static TransformationException createForFrameProcessingException( /* package */ static TransformationException createForVideoFrameProcessingException(
FrameProcessingException cause, int errorCode) { VideoFrameProcessingException cause, int errorCode) {
return new TransformationException("Frame processing error", cause, errorCode); return new TransformationException("Video frame processing error", cause, errorCode);
} }
/** /**
......
...@@ -30,16 +30,16 @@ import com.google.android.exoplayer2.ExoPlayerLibraryInfo; ...@@ -30,16 +30,16 @@ import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.SonicAudioProcessor; import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory; import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.HandlerWrapper; import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.ListenerSet; import com.google.android.exoplayer2.util.ListenerSet;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.InlineMe; import com.google.errorprone.annotations.InlineMe;
...@@ -87,7 +87,7 @@ public final class Transformer { ...@@ -87,7 +87,7 @@ public final class Transformer {
private boolean generateSilentAudio; private boolean generateSilentAudio;
private ListenerSet<Transformer.Listener> listeners; private ListenerSet<Transformer.Listener> listeners;
private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory; private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory;
private FrameProcessor.Factory frameProcessorFactory; private VideoFrameProcessor.Factory videoFrameProcessorFactory;
private Codec.EncoderFactory encoderFactory; private Codec.EncoderFactory encoderFactory;
private Muxer.Factory muxerFactory; private Muxer.Factory muxerFactory;
private Looper looper; private Looper looper;
...@@ -104,7 +104,7 @@ public final class Transformer { ...@@ -104,7 +104,7 @@ public final class Transformer {
transformationRequest = new TransformationRequest.Builder().build(); transformationRequest = new TransformationRequest.Builder().build();
audioProcessors = ImmutableList.of(); audioProcessors = ImmutableList.of();
videoEffects = ImmutableList.of(); videoEffects = ImmutableList.of();
frameProcessorFactory = new GlEffectsFrameProcessor.Factory(); videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory();
encoderFactory = new DefaultEncoderFactory.Builder(this.context).build(); encoderFactory = new DefaultEncoderFactory.Builder(this.context).build();
muxerFactory = new DefaultMuxer.Factory(); muxerFactory = new DefaultMuxer.Factory();
looper = Util.getCurrentOrMainLooper(); looper = Util.getCurrentOrMainLooper();
...@@ -124,7 +124,7 @@ public final class Transformer { ...@@ -124,7 +124,7 @@ public final class Transformer {
this.generateSilentAudio = transformer.generateSilentAudio; this.generateSilentAudio = transformer.generateSilentAudio;
this.listeners = transformer.listeners; this.listeners = transformer.listeners;
this.assetLoaderFactory = transformer.assetLoaderFactory; this.assetLoaderFactory = transformer.assetLoaderFactory;
this.frameProcessorFactory = transformer.frameProcessorFactory; this.videoFrameProcessorFactory = transformer.videoFrameProcessorFactory;
this.encoderFactory = transformer.encoderFactory; this.encoderFactory = transformer.encoderFactory;
this.muxerFactory = transformer.muxerFactory; this.muxerFactory = transformer.muxerFactory;
this.looper = transformer.looper; this.looper = transformer.looper;
...@@ -296,13 +296,14 @@ public final class Transformer { ...@@ -296,13 +296,14 @@ public final class Transformer {
} }
/** /**
* @deprecated Set the {@link FrameProcessor.Factory} in an {@link EditedMediaItem}, and pass it * @deprecated Set the {@link VideoFrameProcessor.Factory} in an {@link EditedMediaItem}, and
* to {@link #start(EditedMediaItem, String)} instead. * pass it to {@link #start(EditedMediaItem, String)} instead.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
@Deprecated @Deprecated
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) { public Builder setFrameProcessorFactory(
this.frameProcessorFactory = frameProcessorFactory; VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this; return this;
} }
...@@ -448,7 +449,7 @@ public final class Transformer { ...@@ -448,7 +449,7 @@ public final class Transformer {
generateSilentAudio, generateSilentAudio,
listeners, listeners,
assetLoaderFactory, assetLoaderFactory,
frameProcessorFactory, videoFrameProcessorFactory,
encoderFactory, encoderFactory,
muxerFactory, muxerFactory,
looper, looper,
...@@ -606,7 +607,7 @@ public final class Transformer { ...@@ -606,7 +607,7 @@ public final class Transformer {
private final boolean generateSilentAudio; private final boolean generateSilentAudio;
private final ListenerSet<Transformer.Listener> listeners; private final ListenerSet<Transformer.Listener> listeners;
private final AssetLoader.Factory assetLoaderFactory; private final AssetLoader.Factory assetLoaderFactory;
private final FrameProcessor.Factory frameProcessorFactory; private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Codec.EncoderFactory encoderFactory; private final Codec.EncoderFactory encoderFactory;
private final Muxer.Factory muxerFactory; private final Muxer.Factory muxerFactory;
private final Looper looper; private final Looper looper;
...@@ -627,7 +628,7 @@ public final class Transformer { ...@@ -627,7 +628,7 @@ public final class Transformer {
boolean generateSilentAudio, boolean generateSilentAudio,
ListenerSet<Listener> listeners, ListenerSet<Listener> listeners,
AssetLoader.Factory assetLoaderFactory, AssetLoader.Factory assetLoaderFactory,
FrameProcessor.Factory frameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
Muxer.Factory muxerFactory, Muxer.Factory muxerFactory,
Looper looper, Looper looper,
...@@ -645,7 +646,7 @@ public final class Transformer { ...@@ -645,7 +646,7 @@ public final class Transformer {
this.generateSilentAudio = generateSilentAudio; this.generateSilentAudio = generateSilentAudio;
this.listeners = listeners; this.listeners = listeners;
this.assetLoaderFactory = assetLoaderFactory; this.assetLoaderFactory = assetLoaderFactory;
this.frameProcessorFactory = frameProcessorFactory; this.videoFrameProcessorFactory = videoFrameProcessorFactory;
this.encoderFactory = encoderFactory; this.encoderFactory = encoderFactory;
this.muxerFactory = muxerFactory; this.muxerFactory = muxerFactory;
this.looper = looper; this.looper = looper;
...@@ -842,7 +843,7 @@ public final class Transformer { ...@@ -842,7 +843,7 @@ public final class Transformer {
.setRemoveAudio(removeAudio) .setRemoveAudio(removeAudio)
.setRemoveVideo(removeVideo) .setRemoveVideo(removeVideo)
.setFlattenForSlowMotion(flattenForSlowMotion) .setFlattenForSlowMotion(flattenForSlowMotion)
.setEffects(new Effects(audioProcessors, videoEffects, frameProcessorFactory)) .setEffects(new Effects(audioProcessors, videoEffects, videoFrameProcessorFactory))
.build(); .build();
start(editedMediaItem, path); start(editedMediaItem, path);
} }
......
...@@ -498,7 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -498,7 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
streamOffsetUs, streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.effects.videoEffects, firstEditedMediaItem.effects.videoEffects,
firstEditedMediaItem.effects.frameProcessorFactory, firstEditedMediaItem.effects.videoFrameProcessorFactory,
encoderFactory, encoderFactory,
muxerWrapper, muxerWrapper,
/* errorConsumer= */ this::onError, /* errorConsumer= */ this::onError,
......
...@@ -38,12 +38,12 @@ import com.google.android.exoplayer2.util.Consumer; ...@@ -38,12 +38,12 @@ import com.google.android.exoplayer2.util.Consumer;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.SurfaceInfo; import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
...@@ -58,8 +58,8 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -58,8 +58,8 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */ /** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265; private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final FrameProcessor frameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo frameProcessorInputColor; private final ColorInfo videoFrameProcessorInputColor;
private final FrameInfo firstFrameInfo; private final FrameInfo firstFrameInfo;
private final EncoderWrapper encoderWrapper; private final EncoderWrapper encoderWrapper;
...@@ -67,7 +67,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -67,7 +67,7 @@ import org.checkerframework.dataflow.qual.Pure;
/** /**
* The timestamp of the last buffer processed before {@linkplain * The timestamp of the last buffer processed before {@linkplain
* FrameProcessor.Listener#onFrameProcessingEnded() frame processing has ended}. * VideoFrameProcessor.Listener#onEnded() frame processing has ended}.
*/ */
private volatile long finalFramePresentationTimeUs; private volatile long finalFramePresentationTimeUs;
...@@ -78,7 +78,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -78,7 +78,7 @@ import org.checkerframework.dataflow.qual.Pure;
long streamOffsetUs, long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
ImmutableList<Effect> effects, ImmutableList<Effect> effects,
FrameProcessor.Factory frameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
Consumer<TransformationException> errorConsumer, Consumer<TransformationException> errorConsumer,
...@@ -122,12 +122,12 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -122,12 +122,12 @@ import org.checkerframework.dataflow.qual.Pure;
ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor(); ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor();
// If not tone mapping using OpenGL, the decoder will output the encoderInputColor, // If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
// possibly by tone mapping. // possibly by tone mapping.
frameProcessorInputColor = videoFrameProcessorInputColor =
isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor; isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
// For consistency with the Android platform, OpenGL tone mapping outputs colors with // For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder. // C.COLOR_TRANSFER_SDR to the encoder.
ColorInfo frameProcessorOutputColor = ColorInfo videoFrameProcessorOutputColor =
isGlToneMapping isGlToneMapping
? new ColorInfo.Builder() ? new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709) .setColorSpace(C.COLOR_SPACE_BT709)
...@@ -136,23 +136,23 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -136,23 +136,23 @@ import org.checkerframework.dataflow.qual.Pure;
.build() .build()
: encoderInputColor; : encoderInputColor;
try { try {
frameProcessor = videoFrameProcessor =
frameProcessorFactory.create( videoFrameProcessorFactory.create(
context, context,
effects, effects,
debugViewProvider, debugViewProvider,
frameProcessorInputColor, videoFrameProcessorInputColor,
frameProcessorOutputColor, videoFrameProcessorOutputColor,
MimeTypes.isVideo(firstInputFormat.sampleMimeType), MimeTypes.isVideo(firstInputFormat.sampleMimeType),
/* releaseFramesAutomatically= */ true, /* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new FrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs; private long lastProcessedFramePresentationTimeUs;
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
try { try {
checkNotNull(frameProcessor) checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height)); .setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height));
} catch (TransformationException exception) { } catch (TransformationException exception) {
errorConsumer.accept(exception); errorConsumer.accept(exception);
...@@ -166,14 +166,15 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -166,14 +166,15 @@ import org.checkerframework.dataflow.qual.Pure;
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onError(VideoFrameProcessingException exception) {
errorConsumer.accept( errorConsumer.accept(
TransformationException.createForFrameProcessingException( TransformationException.createForVideoFrameProcessingException(
exception, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED)); exception,
TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
} }
@Override @Override
public void onFrameProcessingEnded() { public void onEnded() {
VideoSamplePipeline.this.finalFramePresentationTimeUs = VideoSamplePipeline.this.finalFramePresentationTimeUs =
lastProcessedFramePresentationTimeUs; lastProcessedFramePresentationTimeUs;
try { try {
...@@ -183,9 +184,9 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -183,9 +184,9 @@ import org.checkerframework.dataflow.qual.Pure;
} }
} }
}); });
} catch (FrameProcessingException e) { } catch (VideoFrameProcessingException e) {
throw TransformationException.createForFrameProcessingException( throw TransformationException.createForVideoFrameProcessingException(
e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED); e, TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
} }
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees. // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth = int decodedWidth =
...@@ -206,43 +207,43 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -206,43 +207,43 @@ import org.checkerframework.dataflow.qual.Pure;
@Override @Override
public void onMediaItemChanged( public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) { EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build()); new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
} }
@Override @Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) { public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
frameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
return frameProcessor.getInputSurface(); return videoFrameProcessor.getInputSurface();
} }
@Override @Override
public ColorInfo getExpectedInputColorInfo() { public ColorInfo getExpectedInputColorInfo() {
return frameProcessorInputColor; return videoFrameProcessorInputColor;
} }
@Override @Override
public void registerVideoFrame() { public void registerVideoFrame() {
frameProcessor.registerInputFrame(); videoFrameProcessor.registerInputFrame();
} }
@Override @Override
public int getPendingVideoFrameCount() { public int getPendingVideoFrameCount() {
return frameProcessor.getPendingInputFrameCount(); return videoFrameProcessor.getPendingInputFrameCount();
} }
@Override @Override
public void signalEndOfVideoInput() { public void signalEndOfVideoInput() {
frameProcessor.signalEndOfInput(); videoFrameProcessor.signalEndOfInput();
} }
@Override @Override
public void release() { public void release() {
frameProcessor.release(); videoFrameProcessor.release();
encoderWrapper.release(); encoderWrapper.release();
} }
......
...@@ -78,7 +78,7 @@ com.google.android.exoplayer2.text androidx.media3.common.text CueGroup Cue ...@@ -78,7 +78,7 @@ com.google.android.exoplayer2.text androidx.media3.common.text CueGroup Cue
com.google.android.exoplayer2.text com.google.android.exoplayer2.text ExoplayerCuesDecoder SubtitleDecoderFactory TextOutput TextRenderer com.google.android.exoplayer2.text com.google.android.exoplayer2.text ExoplayerCuesDecoder SubtitleDecoderFactory TextOutput TextRenderer
com.google.android.exoplayer2.upstream.crypto com.google.android.exoplayer2.upstream AesCipherDataSource AesCipherDataSink AesFlushingCipher com.google.android.exoplayer2.upstream.crypto com.google.android.exoplayer2.upstream AesCipherDataSource AesCipherDataSink AesFlushingCipher
com.google.android.exoplayer2.util com.google.android.exoplayer2.util AtomicFile Assertions BundleableUtil BundleUtil Clock ClosedSource CodecSpecificDataUtil ColorParser ConditionVariable Consumer CopyOnWriteMultiset EGLSurfaceTexture GlProgram GlUtil HandlerWrapper LibraryLoader ListenerSet Log LongArray MediaFormatUtil NetworkTypeObserver NonNullApi NotificationUtil ParsableBitArray ParsableByteArray RepeatModeUtil RunnableFutureTask Size SystemClock SystemHandlerWrapper TimedValueQueue TimestampAdjuster TraceUtil UnknownNull UnstableApi UriUtil Util XmlPullParserUtil com.google.android.exoplayer2.util com.google.android.exoplayer2.util AtomicFile Assertions BundleableUtil BundleUtil Clock ClosedSource CodecSpecificDataUtil ColorParser ConditionVariable Consumer CopyOnWriteMultiset EGLSurfaceTexture GlProgram GlUtil HandlerWrapper LibraryLoader ListenerSet Log LongArray MediaFormatUtil NetworkTypeObserver NonNullApi NotificationUtil ParsableBitArray ParsableByteArray RepeatModeUtil RunnableFutureTask Size SystemClock SystemHandlerWrapper TimedValueQueue TimestampAdjuster TraceUtil UnknownNull UnstableApi UriUtil Util XmlPullParserUtil
com.google.android.exoplayer2.util androidx.media3.common DebugViewProvider Effect ErrorMessageProvider FlagSet FileTypes FrameInfo FrameProcessingException FrameProcessor MimeTypes PriorityTaskManager SurfaceInfo com.google.android.exoplayer2.util androidx.media3.common DebugViewProvider Effect ErrorMessageProvider FlagSet FileTypes FrameInfo VideoFrameProcessingException VideoFrameProcessor MimeTypes PriorityTaskManager SurfaceInfo
com.google.android.exoplayer2.metadata androidx.media3.common Metadata com.google.android.exoplayer2.metadata androidx.media3.common Metadata
com.google.android.exoplayer2.metadata com.google.android.exoplayer2.metadata MetadataDecoderFactory MetadataOutput MetadataRenderer com.google.android.exoplayer2.metadata com.google.android.exoplayer2.metadata MetadataDecoderFactory MetadataOutput MetadataRenderer
com.google.android.exoplayer2.audio androidx.media3.common AudioAttributes AuxEffectInfo com.google.android.exoplayer2.audio androidx.media3.common AudioAttributes AuxEffectInfo
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment