Commit cf768329 by huangdarwin, committed by christosts

Effect: Rename FrameProcessor

Rename FrameProcessor to VideoFrameProcessor, and GlEffectsFrameProcessor to
DefaultVideoFrameProcessor.

Most changes are semi-mechanical, semi-manual find-replace, preserving case:
* "FrameProc" -> "VideoFrameProc" (e.g. FrameProcessor -> VideoFrameProcessor, and
   FrameProcessingException -> VideoFrameProcessingException)
* "GlEffectsVideoFrameProc" -> "DefaultVideoFrameProc"

PiperOrigin-RevId: 509887384
parent 41a03dd8
Showing with 483 additions and 470 deletions
......@@ -19,7 +19,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -59,7 +59,7 @@ import java.io.IOException;
* @param minInnerRadius The lower bound of the radius that is unaffected by the effect.
* @param maxInnerRadius The upper bound of the radius that is unaffected by the effect.
* @param outerRadius The radius after which all pixels are black.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public PeriodicVignetteShaderProgram(
Context context,
......@@ -69,7 +69,7 @@ import java.io.IOException;
float minInnerRadius,
float maxInnerRadius,
float outerRadius)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
checkArgument(minInnerRadius <= maxInnerRadius);
checkArgument(maxInnerRadius <= outerRadius);
......@@ -78,7 +78,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY});
glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius});
......@@ -95,7 +95,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
......@@ -107,17 +108,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}
......@@ -24,7 +24,7 @@ import android.content.Context;
import android.opengl.EGL14;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.LibraryLoader;
import androidx.media3.common.util.Util;
import androidx.media3.effect.GlShaderProgram;
......@@ -112,7 +112,7 @@ import java.util.concurrent.Future;
futures = new ArrayDeque<>();
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext());
frameProcessor =
......@@ -155,7 +155,7 @@ import java.util.concurrent.Future;
frameProcessor.setAsynchronousErrorListener(
error ->
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(error))));
() -> errorListener.onError(new VideoFrameProcessingException(error))));
}
@Override
......@@ -191,7 +191,7 @@ import java.util.concurrent.Future;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
if (acceptedFrame) {
inputListener.onInputFrameProcessed(inputTexture);
......@@ -213,9 +213,7 @@ import java.util.concurrent.Future;
Thread.currentThread().interrupt();
if (errorListener != null) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
}
}
......@@ -254,14 +252,12 @@ import java.util.concurrent.Future;
try {
if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
new FrameProcessingException("Release timed out")));
() -> errorListener.onError(new VideoFrameProcessingException("Release timed out")));
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
frameProcessor.close();
......@@ -294,11 +290,11 @@ import java.util.concurrent.Future;
futures.remove().get();
} catch (ExecutionException e) {
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
}
}
......
......@@ -230,10 +230,10 @@ public class PlaybackException extends Exception implements Bundleable {
// Frame processing errors (7xxx).
/** Caused by a failure when initializing a {@link FrameProcessor}. */
@UnstableApi public static final int ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a frame. */
@UnstableApi public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 7001;
/** Caused by a failure when initializing a {@link VideoFrameProcessor}. */
@UnstableApi public static final int ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a video frame. */
@UnstableApi public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 7001;
/**
* Player implementations that want to surface custom errors can use error codes greater than this
......@@ -312,10 +312,10 @@ public class PlaybackException extends Exception implements Bundleable {
return "ERROR_CODE_DRM_DEVICE_REVOKED";
case ERROR_CODE_DRM_LICENSE_EXPIRED:
return "ERROR_CODE_DRM_LICENSE_EXPIRED";
case ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_FRAME_PROCESSING_FAILED";
case ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED";
default:
if (errorCode >= CUSTOM_ERROR_CODE_BASE) {
return "custom error code";
......
......@@ -22,25 +22,26 @@ import androidx.media3.common.util.UnstableApi;
* to video frames.
*/
@UnstableApi
public final class FrameProcessingException extends Exception {
public final class VideoFrameProcessingException extends Exception {
/**
* Wraps the given exception in a {@code FrameProcessingException} if it is not already a {@code
* FrameProcessingException} and returns the exception otherwise.
* Wraps the given exception in a {@code VideoFrameProcessingException} if it is not already a
* {@code VideoFrameProcessingException} and returns the exception otherwise.
*/
public static FrameProcessingException from(Exception exception) {
public static VideoFrameProcessingException from(Exception exception) {
return from(exception, /* presentationTimeUs= */ C.TIME_UNSET);
}
/**
* Wraps the given exception in a {@code FrameProcessingException} with the given timestamp if it
* is not already a {@code FrameProcessingException} and returns the exception otherwise.
* Wraps the given exception in a {@code VideoFrameProcessingException} with the given timestamp
* if it is not already a {@code VideoFrameProcessingException} and returns the exception
* otherwise.
*/
public static FrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof FrameProcessingException) {
return (FrameProcessingException) exception;
public static VideoFrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof VideoFrameProcessingException) {
return (VideoFrameProcessingException) exception;
} else {
return new FrameProcessingException(exception, presentationTimeUs);
return new VideoFrameProcessingException(exception, presentationTimeUs);
}
}
......@@ -55,7 +56,7 @@ public final class FrameProcessingException extends Exception {
*
* @param message The detail message for this exception.
*/
public FrameProcessingException(String message) {
public VideoFrameProcessingException(String message) {
this(message, /* presentationTimeUs= */ C.TIME_UNSET);
}
......@@ -65,7 +66,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(String message, long presentationTimeUs) {
public VideoFrameProcessingException(String message, long presentationTimeUs) {
super(message);
this.presentationTimeUs = presentationTimeUs;
}
......@@ -76,7 +77,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception.
* @param cause The cause of this exception.
*/
public FrameProcessingException(String message, Throwable cause) {
public VideoFrameProcessingException(String message, Throwable cause) {
this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET);
}
......@@ -87,7 +88,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
public VideoFrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
super(message, cause);
this.presentationTimeUs = presentationTimeUs;
}
......@@ -97,7 +98,7 @@ public final class FrameProcessingException extends Exception {
*
* @param cause The cause of this exception.
*/
public FrameProcessingException(Throwable cause) {
public VideoFrameProcessingException(Throwable cause) {
this(cause, /* presentationTimeUs= */ C.TIME_UNSET);
}
......@@ -107,7 +108,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(Throwable cause, long presentationTimeUs) {
public VideoFrameProcessingException(Throwable cause, long presentationTimeUs) {
super(cause);
this.presentationTimeUs = presentationTimeUs;
}
......
......@@ -25,7 +25,7 @@ import java.util.List;
import java.util.concurrent.Executor;
/**
* Interface for a frame processor that applies changes to individual video frames.
* Interface for a video frame processor that applies changes to individual video frames.
*
* <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}.
*
......@@ -37,13 +37,13 @@ import java.util.concurrent.Executor;
* to the input {@link Surface}.
*/
@UnstableApi
public interface FrameProcessor {
public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced.
/** A factory for {@link FrameProcessor} instances. */
/** A factory for {@link VideoFrameProcessor} instances. */
interface Factory {
/**
* Creates a new {@link FrameProcessor} instance.
* Creates a new {@link VideoFrameProcessor} instance.
*
* @param context A {@link Context}.
* @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code
......@@ -55,18 +55,18 @@ public interface FrameProcessor {
* video) or not (e.g. from a {@link Bitmap}). See <a
* href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the
* SurfaceTexture docs</a> for more information on external textures.
* @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will render
* output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface}
* automatically as {@link FrameProcessor} is done processing them. If {@code false}, the
* {@link FrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}.
* @return A new instance.
* @throws FrameProcessingException If a problem occurs while creating the {@link
* FrameProcessor}.
* @throws VideoFrameProcessingException If a problem occurs while creating the {@link
* VideoFrameProcessor}.
*/
FrameProcessor create(
VideoFrameProcessor create(
Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider,
......@@ -76,7 +76,7 @@ public interface FrameProcessor {
boolean releaseFramesAutomatically,
Executor executor,
Listener listener)
throws FrameProcessingException;
throws VideoFrameProcessingException;
}
/**
......@@ -106,15 +106,15 @@ public interface FrameProcessor {
void onOutputFrameAvailable(long presentationTimeUs);
/**
* Called when an exception occurs during asynchronous frame processing.
* Called when an exception occurs during asynchronous video frame processing.
*
* <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link FrameProcessor} should be released.
* the {@link VideoFrameProcessor} should be released.
*/
void onFrameProcessingError(FrameProcessingException exception);
void onError(VideoFrameProcessingException exception);
/** Called after the {@link FrameProcessor} has produced its final output frame. */
void onFrameProcessingEnded();
/** Called after the {@link VideoFrameProcessor} has produced its final output frame. */
void onEnded();
}
/**
......@@ -127,14 +127,14 @@ public interface FrameProcessor {
long DROP_OUTPUT_FRAME = -2;
/**
* Provides an input {@link Bitmap} to the {@link FrameProcessor}.
* Provides an input {@link Bitmap} to the {@code VideoFrameProcessor}.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code false}.
*
* <p>Can be called on any thread.
*
* @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}.
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
......@@ -144,9 +144,10 @@ public interface FrameProcessor {
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/**
* Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from.
* Returns the input {@link Surface}, where {@code VideoFrameProcessor} consumes input frames
* from.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
......@@ -171,11 +172,11 @@ public interface FrameProcessor {
void setInputFrameInfo(FrameInfo inputFrameInfo);
/**
* Informs the {@code FrameProcessor} that a frame will be queued to its input surface.
* Informs the {@code VideoFrameProcessor} that a frame will be queued to its input surface.
*
* <p>Must be called before rendering a frame to the frame processor's input surface.
* <p>Must be called before rendering a frame to the {@code VideoFrameProcessor}'s input surface.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
......@@ -189,7 +190,7 @@ public interface FrameProcessor {
* Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
* but not processed off the {@linkplain #getInputSurface() input surface} yet.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
......@@ -201,7 +202,7 @@ public interface FrameProcessor {
* dropped, they will be rendered to this output {@link SurfaceInfo}.
*
* <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
* If the output {@link SurfaceInfo} is {@code null}, the {@code FrameProcessor} will stop
* If the output {@link SurfaceInfo} is {@code null}, the {@code VideoFrameProcessor} will stop
* rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set.
*
* <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain
......@@ -235,7 +236,7 @@ public interface FrameProcessor {
void releaseOutputFrame(long releaseTimeNs);
/**
* Informs the {@code FrameProcessor} that no further input frames should be accepted.
* Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.
*
* <p>Can be called on any thread.
*
......@@ -244,12 +245,12 @@ public interface FrameProcessor {
void signalEndOfInput();
/**
* Flushes the {@code FrameProcessor}.
* Flushes the {@code VideoFrameProcessor}.
*
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>{@link Listener} methods invoked prior to calling this method should be ignored.
......@@ -259,10 +260,9 @@ public interface FrameProcessor {
/**
* Releases all resources.
*
* <p>If the frame processor is released before it has {@linkplain
* Listener#onFrameProcessingEnded() ended}, it will attempt to cancel processing any input frames
* that have already become available. Input frames that become available after release are
* ignored.
* <p>If the {@code VideoFrameProcessor} is released before it has {@linkplain Listener#onEnded()
* ended}, it will attempt to cancel processing any input frames that have already become
* available. Input frames that become available after release are ignored.
*
* <p>This method blocks until all resources are released or releasing times out.
*
......
......@@ -33,7 +33,7 @@ import android.graphics.Color;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class ContrastPixelTest {
......@@ -89,7 +89,7 @@ public class ContrastPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (contrastShaderProgram != null) {
contrastShaderProgram.release();
}
......@@ -198,7 +198,7 @@ public class ContrastPixelTest {
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
private void setupOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException {
private void setupOutputTexture(int outputWidth, int outputHeight) throws Exception {
int outputTexId =
GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false);
......
......@@ -30,7 +30,7 @@ import android.graphics.Bitmap;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -48,7 +48,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class CropPixelTest {
......@@ -82,7 +82,7 @@ public final class CropPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (cropShaderProgram != null) {
cropShaderProgram.release();
}
......
......@@ -32,7 +32,7 @@ import android.graphics.Color;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class HslAdjustmentPixelTest {
......@@ -100,7 +100,7 @@ public final class HslAdjustmentPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (hslProcessor != null) {
hslProcessor.release();
}
......
......@@ -30,7 +30,7 @@ import android.graphics.Matrix;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
......@@ -47,7 +47,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class MatrixShaderProgramPixelTest {
......@@ -87,7 +87,7 @@ public final class MatrixShaderProgramPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}
......
......@@ -35,7 +35,7 @@ import android.opengl.Matrix;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.style.ForegroundColorSpan;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -54,7 +54,7 @@ import org.junit.runner.RunWith;
* <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class OverlayShaderProgramPixelTest {
......@@ -101,7 +101,7 @@ public class OverlayShaderProgramPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (overlayShaderProgram != null) {
overlayShaderProgram.release();
}
......
......@@ -31,7 +31,7 @@ import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class PresentationPixelTest {
......@@ -91,7 +91,7 @@ public final class PresentationPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (presentationShaderProgram != null) {
presentationShaderProgram.release();
}
......
......@@ -33,7 +33,7 @@ import android.graphics.Color;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -52,7 +52,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class RgbAdjustmentPixelTest {
......@@ -99,7 +99,7 @@ public final class RgbAdjustmentPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}
......
......@@ -31,7 +31,7 @@ import android.graphics.Bitmap;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class RgbFilterPixelTest {
......@@ -94,7 +94,7 @@ public final class RgbFilterPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}
......
......@@ -32,7 +32,7 @@ import android.graphics.Color;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil;
......@@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class SingleColorLutPixelTest {
......@@ -88,7 +88,7 @@ public class SingleColorLutPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (colorLutShaderProgram != null) {
colorLutShaderProgram.release();
}
......
......@@ -21,7 +21,7 @@ import android.graphics.Bitmap;
import android.net.Uri;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -44,9 +44,9 @@ public abstract class BitmapOverlay extends TextureOverlay {
* Returns the overlay bitmap displayed at the specified timestamp.
*
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException;
public abstract Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException;
/**
* {@inheritDoc}
......@@ -61,7 +61,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
}
@Override
public int getTextureId(long presentationTimeUs) throws FrameProcessingException {
public int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException {
Bitmap bitmap = getBitmap(presentationTimeUs);
if (bitmap != lastBitmap) {
try {
......@@ -79,7 +79,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
/* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
return lastTextureId;
......@@ -134,14 +134,14 @@ public abstract class BitmapOverlay extends TextureOverlay {
private @MonotonicNonNull Bitmap lastBitmap;
@Override
public Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException {
public Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException {
if (lastBitmap == null) {
BitmapLoader bitmapLoader = new SimpleBitmapLoader();
ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri);
try {
lastBitmap = future.get();
} catch (ExecutionException | InterruptedException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
return lastBitmap;
......
......@@ -35,7 +35,7 @@ import java.util.Queue;
private final GlShaderProgram producingGlShaderProgram;
private final GlShaderProgram consumingGlShaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
@GuardedBy("this")
private final Queue<Pair<TextureInfo, Long>> availableFrames;
......@@ -50,18 +50,18 @@ import java.util.Queue;
* as {@link OutputListener}.
* @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set
* as {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for
* OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be executed
* by the {@link FrameProcessingTaskExecutor}. The caller is responsible for releasing the
* {@link FrameProcessingTaskExecutor}.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that is
* used for OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be
* executed by the {@link VideoFrameProcessingTaskExecutor}. The caller is responsible for
* releasing the {@link VideoFrameProcessingTaskExecutor}.
*/
public ChainingGlShaderProgramListener(
GlShaderProgram producingGlShaderProgram,
GlShaderProgram consumingGlShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.producingGlShaderProgram = producingGlShaderProgram;
this.consumingGlShaderProgram = consumingGlShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
availableFrames = new ArrayDeque<>();
}
......@@ -75,9 +75,10 @@ import java.util.Queue;
long presentationTimeUs = pendingFrame.second;
if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream);
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} else {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ pendingFrame.first, presentationTimeUs));
......@@ -86,7 +87,7 @@ import java.util.Queue;
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> producingGlShaderProgram.releaseOutputFrame(inputTexture));
}
......@@ -94,14 +95,14 @@ import java.util.Queue;
public synchronized void onFlush() {
consumingGlShaderProgramInputCapacity = 0;
availableFrames.clear();
frameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
videoFrameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
}
@Override
public synchronized void onOutputFrameAvailable(
TextureInfo outputTexture, long presentationTimeUs) {
if (consumingGlShaderProgramInputCapacity > 0) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ outputTexture, presentationTimeUs));
......@@ -116,7 +117,8 @@ import java.util.Queue;
if (!availableFrames.isEmpty()) {
availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream);
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
}
}
}
......@@ -18,7 +18,7 @@ package androidx.media3.effect;
import android.content.Context;
import androidx.annotation.WorkerThread;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
......@@ -45,7 +45,7 @@ public interface ColorLut extends GlEffect {
@Override
@WorkerThread
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
}
}
......@@ -20,7 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -41,10 +41,10 @@ import java.io.IOException;
* @param colorLut The {@link ColorLut} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// TODO(b/246315245): Add HDR support.
checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors.");
......@@ -53,7 +53,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
......@@ -73,7 +73,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
......@@ -84,18 +85,18 @@ import java.io.IOException;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
colorLut.release();
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}
......@@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
/** A {@link GlEffect} to control the contrast of video frames. */
......@@ -42,7 +42,7 @@ public class Contrast implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new ContrastShaderProgram(context, this, useHdr);
}
}
......@@ -18,7 +18,7 @@ package androidx.media3.effect;
import android.content.Context;
import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -38,10 +38,10 @@ import java.io.IOException;
* @param contrastEffect The {@link Contrast} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// Use 1.0001f to avoid division by zero issues.
float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast);
......@@ -49,7 +49,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
......@@ -70,7 +70,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
......@@ -79,17 +80,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}
......@@ -23,8 +23,8 @@ import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import androidx.media3.common.C;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.effect.GlShaderProgram.InputListener;
import java.util.Queue;
......@@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
/* package */ final class ExternalTextureManager implements InputListener {
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final ExternalShaderProgram externalShaderProgram;
private final int externalTexId;
private final Surface surface;
......@@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Nullable private volatile FrameInfo currentFrame;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile FrameProcessingTask onFlushCompleteTask;
@Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs;
......@@ -70,21 +70,21 @@ import java.util.concurrent.atomic.AtomicInteger;
*
* @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code
* ExternalTextureManager} will be set as the {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor}.
* @throws FrameProcessingException If a problem occurs while creating the external texture.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
* @throws VideoFrameProcessingException If a problem occurs while creating the external texture.
*/
// The onFrameAvailableListener will not be invoked until the constructor returns.
@SuppressWarnings("nullness:method.invocation.invalid")
public ExternalTextureManager(
ExternalShaderProgram externalShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor)
throws FrameProcessingException {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor)
throws VideoFrameProcessingException {
this.externalShaderProgram = externalShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
try {
externalTexId = GlUtil.createExternalTexture();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
surfaceTexture = new SurfaceTexture(externalTexId);
textureTransformMatrix = new float[16];
......@@ -93,7 +93,7 @@ import java.util.concurrent.atomic.AtomicInteger;
previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener(
unused ->
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--;
......@@ -119,7 +119,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override
public void onReadyToAcceptInputFrame() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
externalShaderProgramInputCapacity.incrementAndGet();
maybeQueueFrameToExternalShaderProgram();
......@@ -128,7 +128,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
currentFrame = null;
maybeQueueFrameToExternalShaderProgram();
......@@ -136,13 +136,13 @@ import java.util.concurrent.atomic.AtomicInteger;
}
/** Sets the task to run on completing flushing, or {@code null} to clear any task. */
public void setOnFlushCompleteListener(@Nullable FrameProcessingTask task) {
public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
onFlushCompleteTask = task;
}
@Override
public void onFlush() {
frameProcessingTaskExecutor.submit(this::flush);
videoFrameProcessingTaskExecutor.submit(this::flush);
}
/**
......@@ -169,10 +169,10 @@ import java.util.concurrent.atomic.AtomicInteger;
/**
* Signals the end of the input.
*
* @see FrameProcessor#signalEndOfInput()
* @see VideoFrameProcessor#signalEndOfInput()
*/
public void signalEndOfInput() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) {
......@@ -204,7 +204,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
}
@WorkerThread
......
......@@ -19,14 +19,14 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import androidx.annotation.IntRange;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
/**
* Caches the input frames.
*
* <p>Example usage: cache the processed frames when presenting them on screen, to accommodate for
* the possible fluctuation in frame processing time between frames.
* the possible fluctuation in video frame processing time between frames.
*/
@UnstableApi
public final class FrameCache implements GlEffect {
......@@ -51,7 +51,7 @@ public final class FrameCache implements GlEffect {
@Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new FrameCacheShaderProgram(context, capacity, useHdr);
}
}
......@@ -19,7 +19,7 @@ import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import com.google.common.collect.Iterables;
......@@ -54,7 +54,7 @@ import java.util.concurrent.Executor;
/** Creates a new instance. */
public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
freeOutputTextures = new ArrayDeque<>();
inUseOutputTextures = new ArrayDeque<>();
try {
......@@ -64,7 +64,7 @@ import java.util.concurrent.Executor;
VERTEX_SHADER_TRANSFORMATION_ES2_PATH,
FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH);
} catch (IOException | GlUtil.GlException e) {
throw FrameProcessingException.from(e);
throw VideoFrameProcessingException.from(e);
}
this.capacity = capacity;
this.useHdr = useHdr;
......@@ -80,7 +80,7 @@ import java.util.concurrent.Executor;
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = frameProcessingException -> {};
errorListener = videoFrameProcessingException -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
}
......@@ -129,7 +129,7 @@ import java.util.concurrent.Executor;
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (GlUtil.GlException | NoSuchElementException e) {
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(FrameProcessingException.from(e)));
() -> errorListener.onError(VideoFrameProcessingException.from(e)));
}
}
......@@ -167,11 +167,11 @@ import java.util.concurrent.Executor;
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
try {
deleteAllOutputTextures();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
......
......@@ -17,7 +17,7 @@ package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
/**
......@@ -36,10 +36,11 @@ public interface GlEffect extends Effect {
* @param context A {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If an error occurs while creating the {@link GlShaderProgram}.
* @throws VideoFrameProcessingException If an error occurs while creating the {@link
* GlShaderProgram}.
*/
GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException;
throws VideoFrameProcessingException;
/**
* Returns whether a {@link GlEffect} applies no change at every timestamp.
......
......@@ -17,7 +17,7 @@ package androidx.media3.effect;
import android.content.Context;
import android.opengl.Matrix;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
......@@ -54,7 +54,7 @@ public interface GlMatrixTransformation extends GlEffect {
@Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return MatrixShaderProgram.create(
context,
/* matrixTransformations= */ ImmutableList.of(this),
......
......@@ -15,7 +15,7 @@
*/
package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
import java.util.concurrent.Executor;
......@@ -47,7 +47,7 @@ import java.util.concurrent.Executor;
public interface GlShaderProgram {
/**
* Listener for input-related frame processing events.
* Listener for input-related video frame processing events.
*
* <p>This listener can be called from any thread.
*/
......@@ -81,7 +81,7 @@ public interface GlShaderProgram {
}
/**
* Listener for output-related frame processing events.
* Listener for output-related video frame processing events.
*
* <p>This listener can be called from any thread.
*/
......@@ -108,26 +108,26 @@ public interface GlShaderProgram {
}
/**
* Listener for frame processing errors.
* Listener for video frame processing errors.
*
* <p>This listener can be called from any thread.
*/
interface ErrorListener {
/**
* Called when an exception occurs during asynchronous frame processing.
* Called when an exception occurs during asynchronous video frame processing.
*
* <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link GlShaderProgram} should be released.
*/
void onFrameProcessingError(FrameProcessingException e);
void onError(VideoFrameProcessingException e);
}
/**
* Sets the {@link InputListener}.
*
* <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link InputListener} methods
* on its internal thread.
* context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link InputListener}
* methods on its internal thread.
*/
void setInputListener(InputListener inputListener);
......@@ -135,7 +135,7 @@ public interface GlShaderProgram {
* Sets the {@link OutputListener}.
*
* <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link OutputListener}
* context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link OutputListener}
* methods on its internal thread.
*/
void setOutputListener(OutputListener outputListener);
......@@ -190,7 +190,7 @@ public interface GlShaderProgram {
/**
* Releases all resources.
*
* @throws FrameProcessingException If an error occurs while releasing resources.
* @throws VideoFrameProcessingException If an error occurs while releasing resources.
*/
void release() throws FrameProcessingException;
void release() throws VideoFrameProcessingException;
}
......@@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
......@@ -114,7 +114,7 @@ public class HslAdjustment implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr);
}
}
......@@ -20,7 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -40,10 +40,10 @@ import java.io.IOException;
* @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// TODO(b/241241680): Check if HDR <-> HSL works the same or not.
checkArgument(!useHdr, "HDR is not yet supported.");
......@@ -51,7 +51,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
......@@ -78,7 +78,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
......@@ -87,7 +88,7 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
}
......@@ -23,22 +23,23 @@ import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.annotation.WorkerThread;
import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* Forwards a frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for consumption.
* Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for
* consumption.
*
* <p>Methods in this class can be called from any thread.
*/
@UnstableApi
/* package */ final class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
......@@ -53,13 +54,14 @@ import java.util.concurrent.LinkedBlockingQueue;
*
* @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager}
* will be set as the {@link GlShaderProgram.InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that the methods of
* this class run on.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that the
* methods of this class run on.
*/
public InternalTextureManager(
GlShaderProgram shaderProgram, FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
GlShaderProgram shaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.shaderProgram = shaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
pendingBitmaps = new LinkedBlockingQueue<>();
}
......@@ -69,7 +71,7 @@ import java.util.concurrent.LinkedBlockingQueue;
// program and change to only allocate one texId at a time. A change to the
// onInputFrameProcessed() method signature to include presentationTimeUs will probably be
// needed to do this.
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
downstreamShaderProgramCapacity++;
maybeQueueToShaderProgram();
......@@ -79,21 +81,21 @@ import java.util.concurrent.LinkedBlockingQueue;
/**
* Provides an input {@link Bitmap} to put into the video frames.
*
* @see FrameProcessor#queueInputBitmap
* @see VideoFrameProcessor#queueInputBitmap
*/
public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr));
}
/**
* Signals the end of the input.
*
* @see FrameProcessor#signalEndOfInput()
* @see VideoFrameProcessor#signalEndOfInput()
*/
public void signalEndOfInput() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
inputEnded = true;
maybeSignalEndOfOutput();
......@@ -102,7 +104,7 @@ import java.util.concurrent.LinkedBlockingQueue;
@WorkerThread
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
if (inputEnded) {
return;
......@@ -116,7 +118,7 @@ import java.util.concurrent.LinkedBlockingQueue;
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw FrameProcessingException.from(e);
throw VideoFrameProcessingException.from(e);
}
TextureInfo textureInfo =
new TextureInfo(
......
......@@ -24,7 +24,7 @@ import android.opengl.Matrix;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -143,15 +143,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations.
* @param useHdr Whether input and output colors are HDR.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram create(
Context context,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
GlProgram glProgram =
createGlProgram(
context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH);
......@@ -185,8 +185,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createWithInternalSampler(
Context context,
......@@ -194,7 +194,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkState(
!ColorInfo.isTransferHdr(inputColorInfo),
"MatrixShaderProgram doesn't support HDR internal sampler input yet.");
......@@ -229,8 +229,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createWithExternalSampler(
Context context,
......@@ -238,7 +238,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
String vertexShaderFilePath =
isInputTransferHdr
......@@ -272,15 +272,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations.
* @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createApplyingOetf(
Context context,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo);
String vertexShaderFilePath =
outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH;
......@@ -317,7 +317,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
@C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
if (isInputTransferHdr) {
......@@ -325,7 +325,7 @@ import java.util.List;
// In HDR editing mode the decoder output is sampled in YUV.
if (!GlUtil.isYuvTargetExtensionSupported()) {
throw new FrameProcessingException(
throw new VideoFrameProcessingException(
"The EXT_YUV_target extension is required for HDR editing input.");
}
glProgram.setFloatsUniform(
......@@ -398,13 +398,13 @@ import java.util.List;
private static GlProgram createGlProgram(
Context context, String vertexShaderFilePath, String fragmentShaderFilePath)
throws FrameProcessingException {
throws VideoFrameProcessingException {
GlProgram glProgram;
try {
glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
float[] identityMatrix = GlUtil.create4x4IdentityMatrix();
......@@ -423,7 +423,8 @@ import java.util.List;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
updateCompositeRgbaMatrixArray(presentationTimeUs);
updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
if (visiblePolygon.size() < 3) {
......@@ -444,17 +445,17 @@ import java.util.List;
GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size());
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
......
......@@ -16,7 +16,7 @@
package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
......@@ -40,7 +40,7 @@ public final class OverlayEffect implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new OverlayShaderProgram(context, useHdr, overlays);
}
}
......@@ -21,7 +21,7 @@ import android.content.Context;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Pair;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
......@@ -49,11 +49,11 @@ import com.google.common.collect.ImmutableList;
* @param context The {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public OverlayShaderProgram(
Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet.");
// The maximum number of samplers allowed in a single GL program is 16.
......@@ -70,7 +70,7 @@ import com.google.common.collect.ImmutableList;
glProgram =
new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size()));
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
glProgram.setBufferAttribute(
......@@ -91,7 +91,8 @@ import com.google.common.collect.ImmutableList;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
if (!overlays.isEmpty()) {
......@@ -155,17 +156,17 @@ import com.google.common.collect.ImmutableList;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
......
......@@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
......@@ -92,7 +92,7 @@ public class RgbFilter implements RgbMatrix {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkForConsistentHdrSetting(useHdr);
return RgbMatrix.super.toGlShaderProgram(context, useHdr);
}
......
......@@ -17,7 +17,7 @@
package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList;
......@@ -41,7 +41,7 @@ public interface RgbMatrix extends GlEffect {
@Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return MatrixShaderProgram.create(
context,
/* matrixTransformations= */ ImmutableList.of(),
......
......@@ -24,7 +24,7 @@ import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
......@@ -150,13 +150,13 @@ public class SingleColorLut implements ColorLut {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkState(!useHdr, "HDR is currently not supported.");
try {
lutTextureId = storeLutAsTexture(lut);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException("Could not store the LUT as a texture.", e);
throw new VideoFrameProcessingException("Could not store the LUT as a texture.", e);
}
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
......
......@@ -18,7 +18,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkState;
import androidx.annotation.CallSuper;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
......@@ -61,7 +61,7 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
this.useHdr = useHdr;
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
}
......@@ -74,9 +74,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* @param inputWidth The input width, in pixels.
* @param inputHeight The input height, in pixels.
* @return The output width and height of frames processed through {@link #drawFrame(int, long)}.
* @throws FrameProcessingException If an error occurs while configuring.
* @throws VideoFrameProcessingException If an error occurs while configuring.
*/
public abstract Size configure(int inputWidth, int inputHeight) throws FrameProcessingException;
public abstract Size configure(int inputWidth, int inputHeight)
throws VideoFrameProcessingException;
/**
* Draws one frame.
......@@ -90,10 +91,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
*
* @param inputTexId Identifier of a 2D OpenGL texture containing the input frame.
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract void drawFrame(int inputTexId, long presentationTimeUs)
throws FrameProcessingException;
throws VideoFrameProcessingException;
@Override
public final void setInputListener(InputListener inputListener) {
......@@ -134,19 +135,19 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
drawFrame(inputTexture.texId, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture);
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) {
} catch (VideoFrameProcessingException | GlUtil.GlException | RuntimeException e) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
e instanceof FrameProcessingException
? (FrameProcessingException) e
: new FrameProcessingException(e)));
errorListener.onError(
e instanceof VideoFrameProcessingException
? (VideoFrameProcessingException) e
: new VideoFrameProcessingException(e)));
}
}
@EnsuresNonNull("outputTexture")
private void configureOutputTexture(int inputWidth, int inputHeight)
throws GlUtil.GlException, FrameProcessingException {
throws GlUtil.GlException, VideoFrameProcessingException {
this.inputWidth = inputWidth;
this.inputHeight = inputHeight;
Size outputSize = configure(inputWidth, inputHeight);
......@@ -184,12 +185,12 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
@Override
@CallSuper
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
if (outputTexture != null) {
try {
GlUtil.deleteTexture(outputTexture.texId);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}
......
......@@ -15,7 +15,7 @@
*/
package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
......@@ -26,9 +26,9 @@ public abstract class TextureOverlay {
* Returns the overlay texture identifier displayed at the specified timestamp.
*
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract int getTextureId(long presentationTimeUs) throws FrameProcessingException;
public abstract int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException;
// This method is required to find the size of a texture given a texture identifier using OpenGL
// ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv().
......
......@@ -15,14 +15,14 @@
*/
package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
/**
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* FrameProcessingException}.
* VideoFrameProcessingException}.
*/
/* package */ interface FrameProcessingTask {
/* package */ interface VideoFrameProcessingTask {
/** Runs the task. */
void run() throws FrameProcessingException, GlUtil.GlException;
void run() throws VideoFrameProcessingException, GlUtil.GlException;
}
......@@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import java.util.ArrayDeque;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
......@@ -29,36 +29,36 @@ import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
/**
* Wrapper around a single thread {@link ExecutorService} for executing {@link FrameProcessingTask}
* instances.
* Wrapper around a single thread {@link ExecutorService} for executing {@link
* VideoFrameProcessingTask} instances.
*
* <p>Public methods can be called from any thread.
*
* <p>The wrapper handles calling {@link
* FrameProcessor.Listener#onFrameProcessingError(FrameProcessingException)} for errors that occur
* during these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed
* to be non-recoverable, so the {@code FrameProcessingTaskExecutor} should be released if an error
* VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
* these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
* non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
* occurs.
*
* <p>{@linkplain #submitWithHighPriority(FrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(FrameProcessingTask) default priority tasks}. Tasks with
* <p>{@linkplain #submitWithHighPriority(VideoFrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with
* equal priority are executed in FIFO order.
*/
/* package */ final class FrameProcessingTaskExecutor {
/* package */ final class VideoFrameProcessingTaskExecutor {
private final ExecutorService singleThreadExecutorService;
private final FrameProcessor.Listener listener;
private final VideoFrameProcessor.Listener listener;
private final Object lock;
@GuardedBy("lock")
private final ArrayDeque<FrameProcessingTask> highPriorityTasks;
private final ArrayDeque<VideoFrameProcessingTask> highPriorityTasks;
@GuardedBy("lock")
private boolean shouldCancelTasks;
/** Creates a new instance. */
public FrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, FrameProcessor.Listener listener) {
public VideoFrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, VideoFrameProcessor.Listener listener) {
this.singleThreadExecutorService = singleThreadExecutorService;
this.listener = listener;
lock = new Object();
......@@ -66,11 +66,11 @@ import java.util.concurrent.RejectedExecutionException;
}
/**
* Submits the given {@link FrameProcessingTask} to be executed after all pending tasks have
* Submits the given {@link VideoFrameProcessingTask} to be executed after all pending tasks have
* completed.
*/
@SuppressWarnings("FutureReturnValueIgnored")
public void submit(FrameProcessingTask task) {
public void submit(VideoFrameProcessingTask task) {
@Nullable RejectedExecutionException executionException = null;
synchronized (lock) {
if (shouldCancelTasks) {
......@@ -89,13 +89,13 @@ import java.util.concurrent.RejectedExecutionException;
}
/**
* Submits the given {@link FrameProcessingTask} to be executed after the currently running task
* and all previously submitted high-priority tasks have completed.
* Submits the given {@link VideoFrameProcessingTask} to be executed after the currently running
* task and all previously submitted high-priority tasks have completed.
*
* <p>Tasks that were previously {@linkplain #submit(FrameProcessingTask) submitted} without
* <p>Tasks that were previously {@linkplain #submit(VideoFrameProcessingTask) submitted} without
* high-priority and have not started executing will be executed after this task is complete.
*/
public void submitWithHighPriority(FrameProcessingTask task) {
public void submitWithHighPriority(VideoFrameProcessingTask task) {
synchronized (lock) {
if (shouldCancelTasks) {
return;
......@@ -111,7 +111,7 @@ import java.util.concurrent.RejectedExecutionException;
/**
* Flushes all scheduled tasks.
*
* <p>During flush, the {@code FrameProcessingTaskExecutor} ignores the {@linkplain #submit
* <p>During flush, the {@code VideoFrameProcessingTaskExecutor} ignores the {@linkplain #submit
* submission of new tasks}. The tasks that are submitted before flushing are either executed or
* canceled when this method returns.
*/
......@@ -137,12 +137,12 @@ import java.util.concurrent.RejectedExecutionException;
/**
* Cancels remaining tasks, runs the given release task, and shuts down the background thread.
*
* @param releaseTask A {@link FrameProcessingTask} to execute before shutting down the background
* thread.
* @param releaseTask A {@link VideoFrameProcessingTask} to execute before shutting down the
* background thread.
* @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds.
* @throws InterruptedException If interrupted while releasing resources.
*/
public void release(FrameProcessingTask releaseTask, long releaseWaitTimeMs)
public void release(VideoFrameProcessingTask releaseTask, long releaseWaitTimeMs)
throws InterruptedException {
synchronized (lock) {
shouldCancelTasks = true;
......@@ -153,16 +153,16 @@ import java.util.concurrent.RejectedExecutionException;
singleThreadExecutorService.shutdown();
try {
if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) {
listener.onFrameProcessingError(new FrameProcessingException("Release timed out"));
listener.onError(new VideoFrameProcessingException("Release timed out"));
}
releaseFuture.get();
} catch (ExecutionException e) {
listener.onFrameProcessingError(new FrameProcessingException(e));
listener.onError(new VideoFrameProcessingException(e));
}
}
private Future<?> wrapTaskAndSubmitToExecutorService(
FrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
VideoFrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
return singleThreadExecutorService.submit(
() -> {
try {
......@@ -172,7 +172,7 @@ import java.util.concurrent.RejectedExecutionException;
}
}
@Nullable FrameProcessingTask nextHighPriorityTask;
@Nullable VideoFrameProcessingTask nextHighPriorityTask;
while (true) {
synchronized (lock) {
// Lock only polling to prevent blocking the public method calls.
......@@ -199,6 +199,6 @@ import java.util.concurrent.RejectedExecutionException;
}
shouldCancelTasks = true;
}
listener.onFrameProcessingError(FrameProcessingException.from(exception));
listener.onError(VideoFrameProcessingException.from(exception));
}
}
......@@ -18,7 +18,7 @@ package androidx.media3.effect;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.After;
......@@ -30,20 +30,22 @@ import org.junit.runner.RunWith;
public final class ChainingGlShaderProgramListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final FrameProcessor.Listener mockFrameProcessorListener =
mock(FrameProcessor.Listener.class);
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(
private final VideoFrameProcessor.Listener mockFrameProcessorListener =
mock(VideoFrameProcessor.Listener.class);
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener);
private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);
private final ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener(
mockProducingGlShaderProgram, mockConsumingGlShaderProgram, frameProcessingTaskExecutor);
mockProducingGlShaderProgram,
mockConsumingGlShaderProgram,
videoFrameProcessingTaskExecutor);
@After
public void release() throws InterruptedException {
frameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
videoFrameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
}
@Test
......
......@@ -30,9 +30,9 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffectsFrameProcessor;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.test.utils.DecodeOneFrameUtil;
import androidx.media3.test.utils.FrameProcessorTestRunner;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
......@@ -40,10 +40,10 @@ import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link GlEffectsFrameProcessor}.
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link DefaultVideoFrameProcessor}.
*
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output
* matches expected output, either from a golden file or from another edit.
* <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* output matches expected output, either from a golden file or from another edit.
*/
// TODO(b/263395272): Move this test to effects/mh tests.
@RunWith(AndroidJUnit4.class)
......@@ -75,12 +75,12 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
"OpenGL-based HDR to SDR tone mapping is unsupported below API 29.";
private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support.";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner;
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After
public void release() {
if (frameProcessorTestRunner != null) {
frameProcessorTestRunner.release();
if (videoFrameProcessorTestRunner != null) {
videoFrameProcessorTestRunner.release();
}
}
......@@ -114,7 +114,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING)
.setInputColorInfo(hlgColor)
......@@ -124,7 +124,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap;
try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
......@@ -177,7 +177,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
.setInputColorInfo(pqColor)
......@@ -187,7 +187,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap;
try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
......@@ -209,10 +209,10 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) {
return new FrameProcessorTestRunner.Builder()
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory());
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory());
}
}
......@@ -16,11 +16,11 @@
package androidx.media3.transformer;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MediaItem;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.effect.GlEffectsFrameProcessor;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import java.util.List;
......@@ -45,19 +45,19 @@ public final class Effects {
*/
public final ImmutableList<Effect> videoEffects;
/**
* The {@link FrameProcessor.Factory} for the {@link FrameProcessor} to use when applying the
* {@code videoEffects} to the video frames.
* The {@link VideoFrameProcessor.Factory} for the {@link VideoFrameProcessor} to use when
* applying the {@code videoEffects} to the video frames.
*/
public final FrameProcessor.Factory frameProcessorFactory;
public final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/**
* Creates an instance using a {@link GlEffectsFrameProcessor.Factory}.
* Creates an instance using a {@link DefaultVideoFrameProcessor.Factory}.
*
* <p>This is equivalent to calling {@link Effects#Effects(List, List, FrameProcessor.Factory)}
* with a {@link GlEffectsFrameProcessor.Factory}.
* <p>This is equivalent to calling {@link Effects#Effects(List, List,
* VideoFrameProcessor.Factory)} with a {@link DefaultVideoFrameProcessor.Factory}.
*/
public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) {
this(audioProcessors, videoEffects, new GlEffectsFrameProcessor.Factory());
this(audioProcessors, videoEffects, new DefaultVideoFrameProcessor.Factory());
}
/**
......@@ -65,14 +65,14 @@ public final class Effects {
*
* @param audioProcessors The {@link #audioProcessors}.
* @param videoEffects The {@link #videoEffects}.
* @param frameProcessorFactory The {@link #frameProcessorFactory}.
* @param videoFrameProcessorFactory The {@link #videoFrameProcessorFactory}.
*/
public Effects(
List<AudioProcessor> audioProcessors,
List<Effect> videoEffects,
FrameProcessor.Factory frameProcessorFactory) {
VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.audioProcessors = ImmutableList.copyOf(audioProcessors);
this.videoEffects = ImmutableList.copyOf(videoEffects);
this.frameProcessorFactory = frameProcessorFactory;
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
}
}
......@@ -21,8 +21,8 @@ import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.UnstableApi;
......@@ -66,7 +66,7 @@ public final class TransformationException extends Exception {
ERROR_CODE_ENCODER_INIT_FAILED,
ERROR_CODE_ENCODING_FAILED,
ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED,
ERROR_CODE_FRAME_PROCESSING_FAILED,
ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED,
ERROR_CODE_AUDIO_PROCESSING_FAILED,
ERROR_CODE_MUXING_FAILED,
})
......@@ -151,8 +151,8 @@ public final class TransformationException extends Exception {
// Video editing errors (5xxx).
/** Caused by a frame processing failure. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 5001;
/** Caused by a video frame processing failure. */
public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 5001;
// Audio processing errors (6xxx).
......@@ -182,7 +182,7 @@ public final class TransformationException extends Exception {
.put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED)
.put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED)
.put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED)
.put("ERROR_CODE_FRAME_PROCESSING_FAILED", ERROR_CODE_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED", ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED)
.put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED)
.buildOrThrow();
......@@ -271,15 +271,15 @@ public final class TransformationException extends Exception {
}
/**
* Creates an instance for a {@link FrameProcessor} related exception.
* Creates an instance for a {@link VideoFrameProcessor} related exception.
*
* @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}.
* @return The created instance.
*/
/* package */ static TransformationException createForFrameProcessingException(
FrameProcessingException cause, int errorCode) {
return new TransformationException("Frame processing error", cause, errorCode);
/* package */ static TransformationException createForVideoFrameProcessingException(
VideoFrameProcessingException cause, int errorCode) {
return new TransformationException("Video frame processing error", cause, errorCode);
}
/**
......
......@@ -28,10 +28,10 @@ import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MediaLibraryInfo;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.audio.SonicAudioProcessor;
import androidx.media3.common.util.Clock;
......@@ -39,7 +39,7 @@ import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.ListenerSet;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffectsFrameProcessor;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
......@@ -89,7 +89,7 @@ public final class Transformer {
private boolean generateSilentAudio;
private ListenerSet<Transformer.Listener> listeners;
private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory;
private FrameProcessor.Factory frameProcessorFactory;
private VideoFrameProcessor.Factory videoFrameProcessorFactory;
private Codec.EncoderFactory encoderFactory;
private Muxer.Factory muxerFactory;
private Looper looper;
......@@ -106,7 +106,7 @@ public final class Transformer {
transformationRequest = new TransformationRequest.Builder().build();
audioProcessors = ImmutableList.of();
videoEffects = ImmutableList.of();
frameProcessorFactory = new GlEffectsFrameProcessor.Factory();
videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory();
encoderFactory = new DefaultEncoderFactory.Builder(this.context).build();
muxerFactory = new DefaultMuxer.Factory();
looper = Util.getCurrentOrMainLooper();
......@@ -126,7 +126,7 @@ public final class Transformer {
this.generateSilentAudio = transformer.generateSilentAudio;
this.listeners = transformer.listeners;
this.assetLoaderFactory = transformer.assetLoaderFactory;
this.frameProcessorFactory = transformer.frameProcessorFactory;
this.videoFrameProcessorFactory = transformer.videoFrameProcessorFactory;
this.encoderFactory = transformer.encoderFactory;
this.muxerFactory = transformer.muxerFactory;
this.looper = transformer.looper;
......@@ -298,13 +298,14 @@ public final class Transformer {
}
/**
* @deprecated Set the {@link FrameProcessor.Factory} in an {@link EditedMediaItem}, and pass it
* to {@link #start(EditedMediaItem, String)} instead.
* @deprecated Set the {@link VideoFrameProcessor.Factory} in an {@link EditedMediaItem}, and
* pass it to {@link #start(EditedMediaItem, String)} instead.
*/
@CanIgnoreReturnValue
@Deprecated
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) {
this.frameProcessorFactory = frameProcessorFactory;
public Builder setFrameProcessorFactory(
VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this;
}
......@@ -450,7 +451,7 @@ public final class Transformer {
generateSilentAudio,
listeners,
assetLoaderFactory,
frameProcessorFactory,
videoFrameProcessorFactory,
encoderFactory,
muxerFactory,
looper,
......@@ -608,7 +609,7 @@ public final class Transformer {
private final boolean generateSilentAudio;
private final ListenerSet<Transformer.Listener> listeners;
private final AssetLoader.Factory assetLoaderFactory;
private final FrameProcessor.Factory frameProcessorFactory;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Codec.EncoderFactory encoderFactory;
private final Muxer.Factory muxerFactory;
private final Looper looper;
......@@ -629,7 +630,7 @@ public final class Transformer {
boolean generateSilentAudio,
ListenerSet<Listener> listeners,
AssetLoader.Factory assetLoaderFactory,
FrameProcessor.Factory frameProcessorFactory,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory,
Muxer.Factory muxerFactory,
Looper looper,
......@@ -647,7 +648,7 @@ public final class Transformer {
this.generateSilentAudio = generateSilentAudio;
this.listeners = listeners;
this.assetLoaderFactory = assetLoaderFactory;
this.frameProcessorFactory = frameProcessorFactory;
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
this.encoderFactory = encoderFactory;
this.muxerFactory = muxerFactory;
this.looper = looper;
......@@ -844,7 +845,7 @@ public final class Transformer {
.setRemoveAudio(removeAudio)
.setRemoveVideo(removeVideo)
.setFlattenForSlowMotion(flattenForSlowMotion)
.setEffects(new Effects(audioProcessors, videoEffects, frameProcessorFactory))
.setEffects(new Effects(audioProcessors, videoEffects, videoFrameProcessorFactory))
.build();
start(editedMediaItem, path);
}
......
......@@ -498,7 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
streamOffsetUs,
transformationRequest,
firstEditedMediaItem.effects.videoEffects,
firstEditedMediaItem.effects.frameProcessorFactory,
firstEditedMediaItem.effects.videoFrameProcessorFactory,
encoderFactory,
muxerWrapper,
/* errorConsumer= */ this::onError,
......
......@@ -37,10 +37,10 @@ import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util;
......@@ -58,8 +58,8 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final FrameProcessor frameProcessor;
private final ColorInfo frameProcessorInputColor;
private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo videoFrameProcessorInputColor;
private final FrameInfo firstFrameInfo;
private final EncoderWrapper encoderWrapper;
......@@ -67,7 +67,7 @@ import org.checkerframework.dataflow.qual.Pure;
/**
* The timestamp of the last buffer processed before {@linkplain
* FrameProcessor.Listener#onFrameProcessingEnded() frame processing has ended}.
* VideoFrameProcessor.Listener#onEnded() frame processing has ended}.
*/
private volatile long finalFramePresentationTimeUs;
......@@ -78,7 +78,7 @@ import org.checkerframework.dataflow.qual.Pure;
long streamOffsetUs,
TransformationRequest transformationRequest,
ImmutableList<Effect> effects,
FrameProcessor.Factory frameProcessorFactory,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper,
Consumer<TransformationException> errorConsumer,
......@@ -122,12 +122,12 @@ import org.checkerframework.dataflow.qual.Pure;
ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor();
// If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
// possibly by tone mapping.
frameProcessorInputColor =
videoFrameProcessorInputColor =
isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
// For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder.
ColorInfo frameProcessorOutputColor =
ColorInfo videoFrameProcessorOutputColor =
isGlToneMapping
? new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709)
......@@ -136,23 +136,23 @@ import org.checkerframework.dataflow.qual.Pure;
.build()
: encoderInputColor;
try {
frameProcessor =
frameProcessorFactory.create(
videoFrameProcessor =
videoFrameProcessorFactory.create(
context,
effects,
debugViewProvider,
frameProcessorInputColor,
frameProcessorOutputColor,
videoFrameProcessorInputColor,
videoFrameProcessorOutputColor,
MimeTypes.isVideo(firstInputFormat.sampleMimeType),
/* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new FrameProcessor.Listener() {
new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs;
@Override
public void onOutputSizeChanged(int width, int height) {
try {
checkNotNull(frameProcessor)
checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height));
} catch (TransformationException exception) {
errorConsumer.accept(exception);
......@@ -166,14 +166,15 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
public void onError(VideoFrameProcessingException exception) {
errorConsumer.accept(
TransformationException.createForFrameProcessingException(
exception, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED));
TransformationException.createForVideoFrameProcessingException(
exception,
TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
}
@Override
public void onFrameProcessingEnded() {
public void onEnded() {
VideoSamplePipeline.this.finalFramePresentationTimeUs =
lastProcessedFramePresentationTimeUs;
try {
......@@ -183,9 +184,9 @@ import org.checkerframework.dataflow.qual.Pure;
}
}
});
} catch (FrameProcessingException e) {
throw TransformationException.createForFrameProcessingException(
e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED);
} catch (VideoFrameProcessingException e) {
throw TransformationException.createForVideoFrameProcessingException(
e, TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
}
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth =
......@@ -206,43 +207,43 @@ import org.checkerframework.dataflow.qual.Pure;
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo(
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
}
@Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
frameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
@Override
public Surface getInputSurface() {
return frameProcessor.getInputSurface();
return videoFrameProcessor.getInputSurface();
}
@Override
public ColorInfo getExpectedInputColorInfo() {
return frameProcessorInputColor;
return videoFrameProcessorInputColor;
}
@Override
public void registerVideoFrame() {
frameProcessor.registerInputFrame();
videoFrameProcessor.registerInputFrame();
}
@Override
public int getPendingVideoFrameCount() {
return frameProcessor.getPendingInputFrameCount();
return videoFrameProcessor.getPendingInputFrameCount();
}
@Override
public void signalEndOfVideoInput() {
frameProcessor.signalEndOfInput();
videoFrameProcessor.signalEndOfInput();
}
@Override
public void release() {
frameProcessor.release();
videoFrameProcessor.release();
encoderWrapper.release();
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment