Commit 836c49e9 by huangdarwin Committed by Tianyi Feng

HDR: Implement DefaultVideoFrameProcessor texture output for tests.

Previously, we always used an ImageReader to read the output of DefaultVideoFrameProcessor in pixel tests. ImageReader cannot read HDR contents, so we couldn't support HDR pixel tests.

Reading from a texture allows us to use glReadPixels to read from
DefaultVideoFrameProcessor, and to build upon this to implement HDR pixel tests. We
do still want tests for surface output, though, because real use cases will only
output to Surfaces.

Also, add some tests for outputting to textures, since this test infrastructure is
a bit complex.

PiperOrigin-RevId: 519786535
parent 736242d1
...@@ -36,6 +36,7 @@ import com.google.android.exoplayer2.util.DebugViewProvider; ...@@ -36,6 +36,7 @@ import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.GlObjectsProvider; import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.SurfaceInfo; import com.google.android.exoplayer2.util.SurfaceInfo;
...@@ -62,6 +63,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -62,6 +63,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** A factory for {@link DefaultVideoFrameProcessor} instances. */ /** A factory for {@link DefaultVideoFrameProcessor} instances. */
public static final class Factory implements VideoFrameProcessor.Factory { public static final class Factory implements VideoFrameProcessor.Factory {
private GlObjectsProvider glObjectsProvider = GlObjectsProvider.DEFAULT; private GlObjectsProvider glObjectsProvider = GlObjectsProvider.DEFAULT;
private boolean outputToTexture;
/** /**
* {@inheritDoc} * {@inheritDoc}
...@@ -76,6 +78,19 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -76,6 +78,19 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
/** /**
* Sets whether to output to a texture for testing.
*
* <p>Must be called before {@link #create}.
*
* <p>The default value is {@code false}.
*
* @param outputToTexture Whether the created video frame processor should also render each
*     output frame to an internally managed output texture.
* @return This factory, for convenience of chained method calls.
*/
@VisibleForTesting
public Factory setOutputToTexture(boolean outputToTexture) {
this.outputToTexture = outputToTexture;
return this;
}
/**
* {@inheritDoc} * {@inheritDoc}
* *
* <p>All {@link Effect} instances must be {@link GlEffect} instances. * <p>All {@link Effect} instances must be {@link GlEffect} instances.
...@@ -151,7 +166,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -151,7 +166,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
singleThreadExecutorService, singleThreadExecutorService,
listenerExecutor, listenerExecutor,
listener, listener,
glObjectsProvider)); glObjectsProvider,
outputToTexture));
try { try {
return defaultVideoFrameProcessorFuture.get(); return defaultVideoFrameProcessorFuture.get();
...@@ -224,7 +240,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -224,7 +240,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** Returns the task executor that runs video frame processing tasks. */ /** Returns the task executor that runs video frame processing tasks. */
@VisibleForTesting @VisibleForTesting
/* package */ VideoFrameProcessingTaskExecutor getTaskExecutor() { public VideoFrameProcessingTaskExecutor getTaskExecutor() {
return videoFrameProcessingTaskExecutor; return videoFrameProcessingTaskExecutor;
} }
...@@ -287,6 +303,20 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -287,6 +303,20 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
finalShaderProgramWrapper.setOutputSurfaceInfo(outputSurfaceInfo); finalShaderProgramWrapper.setOutputSurfaceInfo(outputSurfaceInfo);
} }
/**
 * Gets the output {@link GlTextureInfo}.
 *
 * <p>Should only be called if {@code outputToTexture} was set on the {@link Factory}, and after
 * a frame is available, as reported by the output {@linkplain #setOutputSurfaceInfo surface}'s
 * {@link SurfaceTexture#setOnFrameAvailableListener}.
 *
 * @return The texture holding the most recently rendered output frame, or {@code null} if an
 *     output texture is not yet available.
 */
@VisibleForTesting
@Nullable
public GlTextureInfo getOutputTextureInfo() {
return finalShaderProgramWrapper.getOutputTextureInfo();
}
@Override @Override
public void releaseOutputFrame(long releaseTimeNs) { public void releaseOutputFrame(long releaseTimeNs) {
checkState( checkState(
...@@ -380,7 +410,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -380,7 +410,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
ExecutorService singleThreadExecutorService, ExecutorService singleThreadExecutorService,
Executor executor, Executor executor,
Listener listener, Listener listener,
GlObjectsProvider glObjectsProvider) GlObjectsProvider glObjectsProvider,
boolean outputToTexture)
throws GlUtil.GlException, VideoFrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
checkState(Thread.currentThread().getName().equals(THREAD_NAME)); checkState(Thread.currentThread().getName().equals(THREAD_NAME));
...@@ -423,7 +454,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -423,7 +454,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
releaseFramesAutomatically, releaseFramesAutomatically,
executor, executor,
listener, listener,
glObjectsProvider); glObjectsProvider,
outputToTexture);
setGlObjectProviderOnShaderPrograms(shaderPrograms, glObjectsProvider); setGlObjectProviderOnShaderPrograms(shaderPrograms, glObjectsProvider);
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor = VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(singleThreadExecutorService, listener); new VideoFrameProcessingTaskExecutor(singleThreadExecutorService, listener);
...@@ -462,7 +494,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -462,7 +494,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor executor, Executor executor,
Listener listener, Listener listener,
GlObjectsProvider glObjectsProvider) GlObjectsProvider glObjectsProvider,
boolean outputToTexture)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
ImmutableList.Builder<GlShaderProgram> shaderProgramListBuilder = new ImmutableList.Builder<>(); ImmutableList.Builder<GlShaderProgram> shaderProgramListBuilder = new ImmutableList.Builder<>();
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder = ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
...@@ -536,7 +569,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -536,7 +569,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
releaseFramesAutomatically, releaseFramesAutomatically,
executor, executor,
listener, listener,
glObjectsProvider)); glObjectsProvider,
outputToTexture));
return shaderProgramListBuilder.build(); return shaderProgramListBuilder.build();
} }
......
...@@ -32,6 +32,7 @@ import android.view.SurfaceHolder; ...@@ -32,6 +32,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.GlObjectsProvider; import com.google.android.exoplayer2.util.GlObjectsProvider;
...@@ -52,8 +53,8 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; ...@@ -52,8 +53,8 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** /**
* Wrapper around a {@link DefaultShaderProgram} that writes to the provided output surface and * Wrapper around a {@link DefaultShaderProgram} that writes to the provided output surface and if
* optional debug surface view. * provided, the optional debug surface view or output texture.
* *
* <p>The wrapped {@link DefaultShaderProgram} applies the {@link GlMatrixTransformation} and {@link * <p>The wrapped {@link DefaultShaderProgram} applies the {@link GlMatrixTransformation} and {@link
* RgbMatrix} instances passed to the constructor, followed by any transformations needed to convert * RgbMatrix} instances passed to the constructor, followed by any transformations needed to convert
...@@ -82,6 +83,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -82,6 +83,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final float[] textureTransformMatrix; private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue; private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<GlTextureInfo, Long>> availableFrames; private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
private final boolean outputToTexture;
private int inputWidth; private int inputWidth;
private int inputHeight; private int inputHeight;
...@@ -91,6 +93,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -91,6 +93,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private InputListener inputListener; private InputListener inputListener;
private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation; private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation;
@Nullable private SurfaceView debugSurfaceView; @Nullable private SurfaceView debugSurfaceView;
private @MonotonicNonNull GlTextureInfo outputTexture;
private boolean frameProcessingStarted; private boolean frameProcessingStarted;
private volatile boolean outputChanged; private volatile boolean outputChanged;
...@@ -117,7 +120,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -117,7 +120,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor videoFrameProcessorListenerExecutor, Executor videoFrameProcessorListenerExecutor,
VideoFrameProcessor.Listener videoFrameProcessorListener, VideoFrameProcessor.Listener videoFrameProcessorListener,
GlObjectsProvider glObjectsProvider) { GlObjectsProvider glObjectsProvider,
boolean outputToTexture) {
this.context = context; this.context = context;
this.matrixTransformations = matrixTransformations; this.matrixTransformations = matrixTransformations;
this.rgbMatrices = rgbMatrices; this.rgbMatrices = rgbMatrices;
...@@ -132,6 +136,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -132,6 +136,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor; this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor;
this.videoFrameProcessorListener = videoFrameProcessorListener; this.videoFrameProcessorListener = videoFrameProcessorListener;
this.glObjectsProvider = glObjectsProvider; this.glObjectsProvider = glObjectsProvider;
this.outputToTexture = outputToTexture;
textureTransformMatrix = GlUtil.create4x4IdentityMatrix(); textureTransformMatrix = GlUtil.create4x4IdentityMatrix();
streamOffsetUsQueue = new ConcurrentLinkedQueue<>(); streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
...@@ -202,7 +207,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -202,7 +207,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
videoFrameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs)); () -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
if (releaseFramesAutomatically) { if (releaseFramesAutomatically) {
renderFrameToSurfaces( renderFrame(
inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000); inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
} else { } else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs)); availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
...@@ -220,7 +225,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -220,7 +225,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
frameProcessingStarted = true; frameProcessingStarted = true;
checkState(!releaseFramesAutomatically); checkState(!releaseFramesAutomatically);
Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove(); Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
renderFrameToSurfaces( renderFrame(
/* inputTexture= */ oldestAvailableFrame.first, /* inputTexture= */ oldestAvailableFrame.first,
/* presentationTimeUs= */ oldestAvailableFrame.second, /* presentationTimeUs= */ oldestAvailableFrame.second,
releaseTimeNs); releaseTimeNs);
...@@ -258,6 +263,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -258,6 +263,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
defaultShaderProgram.release(); defaultShaderProgram.release();
} }
try { try {
if (outputTexture != null) {
GlUtil.deleteTexture(outputTexture.texId);
GlUtil.deleteFbo(outputTexture.fboId);
}
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface); GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e); throw new VideoFrameProcessingException(e);
...@@ -294,17 +303,24 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -294,17 +303,24 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.outputSurfaceInfo = outputSurfaceInfo; this.outputSurfaceInfo = outputSurfaceInfo;
} }
private void renderFrameToSurfaces( private void renderFrame(
GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) { GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) {
try { try {
maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs); maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs);
if (outputToTexture && defaultShaderProgram != null) {
renderFrameToOutputTexture(inputTexture, presentationTimeUs);
}
} catch (VideoFrameProcessingException | GlUtil.GlException e) { } catch (VideoFrameProcessingException | GlUtil.GlException e) {
videoFrameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> () ->
videoFrameProcessorListener.onError( videoFrameProcessorListener.onError(
VideoFrameProcessingException.from(e, presentationTimeUs))); VideoFrameProcessingException.from(e, presentationTimeUs)));
} }
maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs); if (debugSurfaceViewWrapper != null && defaultShaderProgram != null) {
renderFrameToDebugSurface(inputTexture, presentationTimeUs);
}
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
} }
...@@ -338,6 +354,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -338,6 +354,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface); EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
} }
/**
 * Draws {@code inputTexture} into the framebuffer backing the output texture.
 *
 * <p>Must only be called once {@code outputTexture} has been configured and {@code
 * defaultShaderProgram} is non-null; callers are expected to guarantee both.
 */
private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs)
throws GlUtil.GlException, VideoFrameProcessingException {
checkNotNull(outputTexture);
GlUtil.focusFramebufferUsingCurrentContext(
outputTexture.fboId, outputTexture.width, outputTexture.height);
GlUtil.clearOutputFrame();
checkNotNull(defaultShaderProgram).drawFrame(inputTexture.texId, presentationTimeUs);
}
/** Returns the output texture, or {@code null} if no output texture has been configured yet. */
@VisibleForTesting
@Nullable
/* package */ GlTextureInfo getOutputTextureInfo() {
return outputTexture;
}
/** /**
* Ensures the instance is configured. * Ensures the instance is configured.
* *
...@@ -408,7 +439,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -408,7 +439,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
outputChanged = false; outputChanged = false;
} }
if (defaultShaderProgram == null) { if (defaultShaderProgram == null) {
defaultShaderProgram = createDefaultShaderProgramForOutputSurface(outputSurfaceInfo); DefaultShaderProgram defaultShaderProgram =
createDefaultShaderProgramForOutputSurface(outputSurfaceInfo);
if (outputToTexture) {
configureOutputTexture(
checkNotNull(outputSizeBeforeSurfaceTransformation).getWidth(),
checkNotNull(outputSizeBeforeSurfaceTransformation).getHeight());
}
this.defaultShaderProgram = defaultShaderProgram;
} }
this.outputSurfaceInfo = outputSurfaceInfo; this.outputSurfaceInfo = outputSurfaceInfo;
...@@ -416,6 +454,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -416,6 +454,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return true; return true;
} }
/**
 * (Re)creates the output texture and its associated framebuffer at the given dimensions,
 * deleting any previously configured output texture and framebuffer first to avoid leaking GL
 * objects.
 */
private void configureOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException {
if (outputTexture != null) {
GlUtil.deleteTexture(outputTexture.texId);
GlUtil.deleteFbo(outputTexture.fboId);
}
int outputTexId =
GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false);
outputTexture =
glObjectsProvider.createBuffersForTexture(outputTexId, outputWidth, outputHeight);
}
private DefaultShaderProgram createDefaultShaderProgramForOutputSurface( private DefaultShaderProgram createDefaultShaderProgramForOutputSurface(
SurfaceInfo outputSurfaceInfo) throws VideoFrameProcessingException { SurfaceInfo outputSurfaceInfo) throws VideoFrameProcessingException {
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder = ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
...@@ -464,24 +514,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -464,24 +514,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return defaultShaderProgram; return defaultShaderProgram;
} }
private void maybeRenderFrameToDebugSurface(GlTextureInfo inputTexture, long presentationTimeUs) { private void renderFrameToDebugSurface(GlTextureInfo inputTexture, long presentationTimeUs) {
if (debugSurfaceViewWrapper == null || this.defaultShaderProgram == null) { DefaultShaderProgram defaultShaderProgram = checkNotNull(this.defaultShaderProgram);
return; SurfaceViewWrapper debugSurfaceViewWrapper = checkNotNull(this.debugSurfaceViewWrapper);
}
DefaultShaderProgram defaultShaderProgram = this.defaultShaderProgram;
try { try {
debugSurfaceViewWrapper.maybeRenderToSurfaceView( checkNotNull(debugSurfaceViewWrapper)
() -> { .maybeRenderToSurfaceView(
GlUtil.clearOutputFrame(); () -> {
@C.ColorTransfer GlUtil.clearOutputFrame();
int configuredColorTransfer = defaultShaderProgram.getOutputColorTransfer(); @C.ColorTransfer
defaultShaderProgram.setOutputColorTransfer( int configuredColorTransfer = defaultShaderProgram.getOutputColorTransfer();
checkNotNull(debugSurfaceViewWrapper).outputColorTransfer); defaultShaderProgram.setOutputColorTransfer(
defaultShaderProgram.drawFrame(inputTexture.texId, presentationTimeUs); debugSurfaceViewWrapper.outputColorTransfer);
defaultShaderProgram.setOutputColorTransfer(configuredColorTransfer); defaultShaderProgram.drawFrame(inputTexture.texId, presentationTimeUs);
}, defaultShaderProgram.setOutputColorTransfer(configuredColorTransfer);
glObjectsProvider); },
glObjectsProvider);
} catch (VideoFrameProcessingException | GlUtil.GlException e) { } catch (VideoFrameProcessingException | GlUtil.GlException e) {
Log.d(TAG, "Error rendering to debug preview", e); Log.d(TAG, "Error rendering to debug preview", e);
} }
......
...@@ -15,6 +15,9 @@ ...@@ -15,6 +15,9 @@
*/ */
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
...@@ -22,7 +25,8 @@ import com.google.android.exoplayer2.util.VideoFrameProcessingException; ...@@ -22,7 +25,8 @@ import com.google.android.exoplayer2.util.VideoFrameProcessingException;
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link * Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* VideoFrameProcessingException}. * VideoFrameProcessingException}.
*/ */
/* package */ interface VideoFrameProcessingTask { @VisibleForTesting(otherwise = PACKAGE_PRIVATE)
public interface VideoFrameProcessingTask {
/** Runs the task. */ /** Runs the task. */
void run() throws VideoFrameProcessingException, GlUtil.GlException; void run() throws VideoFrameProcessingException, GlUtil.GlException;
} }
...@@ -15,10 +15,12 @@ ...@@ -15,10 +15,12 @@
*/ */
package com.google.android.exoplayer2.effect; package com.google.android.exoplayer2.effect;
import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE;
import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.ArrayDeque; import java.util.ArrayDeque;
...@@ -44,7 +46,8 @@ import java.util.concurrent.RejectedExecutionException; ...@@ -44,7 +46,8 @@ import java.util.concurrent.RejectedExecutionException;
* executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with * executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with
* equal priority are executed in FIFO order. * equal priority are executed in FIFO order.
*/ */
/* package */ final class VideoFrameProcessingTaskExecutor { @VisibleForTesting(otherwise = PACKAGE_PRIVATE)
public final class VideoFrameProcessingTaskExecutor {
private final ExecutorService singleThreadExecutorService; private final ExecutorService singleThreadExecutorService;
private final VideoFrameProcessor.Listener listener; private final VideoFrameProcessor.Listener listener;
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer.mh;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.effect.BitmapOverlay;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.OverlayEffect;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Pixel test for video frame processing, outputting to a texture, via {@link
 * DefaultVideoFrameProcessor}.
 *
 * <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
 * output matches expected output, either from a golden file or from another edit.
 */
// TODO(b/263395272): Move this test to effects/mh tests, and remove @TestOnly dependencies.
@RunWith(AndroidJUnit4.class)
public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
  private static final String ORIGINAL_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
  private static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
  private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";

  /** Input video of which we only use the first frame. */
  private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";

  private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;

  @After
  public void release() {
    checkNotNull(videoFrameProcessorTestRunner).release();
  }

  @Test
  public void noEffects_matchesGoldenFile() throws Exception {
    String testId = "noEffects_matchesGoldenFile";
    videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
    Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

    Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
  }

  @Test
  public void bitmapOverlay_matchesGoldenFile() throws Exception {
    String testId = "bitmapOverlay_matchesGoldenFile";
    Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
    BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
    videoFrameProcessorTestRunner =
        getDefaultFrameProcessorTestRunnerBuilder(testId)
            .setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
            .build();
    Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);

    Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();

    // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
  }

  // TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps,
  //  using GlEffectWrapper to ensure usage of intermediate textures.

  /** Creates a test runner builder configured to output to a texture instead of a surface. */
  private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
      String testId) {
    DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
        new DefaultVideoFrameProcessor.Factory().setOutputToTexture(true);
    return new VideoFrameProcessorTestRunner.Builder()
        .setTestId(testId)
        .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
        .setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
        .setBitmapReaderFactory(new TextureBitmapReader.Factory());
  }

  /**
   * A {@link VideoFrameProcessorTestRunner.BitmapReader} that reads from an OpenGL texture.
   *
   * <p>Only for use on physical devices.
   */
  private static final class TextureBitmapReader
      implements VideoFrameProcessorTestRunner.BitmapReader {
    // TODO(b/239172735): This outputs an incorrect black output image on emulators.

    /** A factory for {@link TextureBitmapReader} instances. */
    public static final class Factory
        implements VideoFrameProcessorTestRunner.BitmapReader.Factory {
      @Override
      public TextureBitmapReader create(
          VideoFrameProcessor videoFrameProcessor, int width, int height) {
        // Output dimensions are read from the output texture itself, so width and height are
        // unused here.
        return new TextureBitmapReader((DefaultVideoFrameProcessor) videoFrameProcessor);
      }
    }

    private final DefaultVideoFrameProcessor defaultVideoFrameProcessor;
    private @MonotonicNonNull Bitmap outputBitmap;

    private TextureBitmapReader(DefaultVideoFrameProcessor defaultVideoFrameProcessor) {
      this.defaultVideoFrameProcessor = defaultVideoFrameProcessor;
    }

    @Override
    public Surface getSurface() {
      int texId;
      try {
        texId = GlUtil.createExternalTexture();
      } catch (GlUtil.GlException e) {
        // IllegalStateException subclasses RuntimeException, so existing callers catching
        // RuntimeException are unaffected.
        throw new IllegalStateException(e);
      }
      SurfaceTexture surfaceTexture = new SurfaceTexture(texId);
      surfaceTexture.setOnFrameAvailableListener(this::onSurfaceTextureFrameAvailable);
      return new Surface(surfaceTexture);
    }

    @Override
    public Bitmap getBitmap() {
      return checkStateNotNull(outputBitmap);
    }

    private void onSurfaceTextureFrameAvailable(SurfaceTexture surfaceTexture) {
      // Read the texture on the GL thread, ahead of any already-queued frame-processing tasks.
      defaultVideoFrameProcessor
          .getTaskExecutor()
          .submitWithHighPriority(this::getBitmapFromTexture);
    }

    private void getBitmapFromTexture() throws GlUtil.GlException {
      GlTextureInfo outputTexture =
          checkNotNull(defaultVideoFrameProcessor.getOutputTextureInfo());
      GlUtil.focusFramebufferUsingCurrentContext(
          outputTexture.fboId, outputTexture.width, outputTexture.height);
      outputBitmap =
          BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(
              outputTexture.width, outputTexture.height);
    }
  }
}
...@@ -50,20 +50,36 @@ public class BitmapPixelTestUtil { ...@@ -50,20 +50,36 @@ public class BitmapPixelTestUtil {
/** /**
* Maximum allowed average pixel difference between the expected and actual edited images in pixel * Maximum allowed average pixel difference between the expected and actual edited images in pixel
* difference-based tests. The value is chosen so that differences in decoder behavior across * difference-based tests, between emulators.
* emulator versions don't affect whether the test passes for most emulators, but substantial
* distortions introduced by changes in tested components will cause the test to fail.
* *
* <p>To run pixel difference-based tests on physical devices, please use a value of 5f, rather * <p>The value is chosen so that differences in decoder behavior across emulator versions don't
* than 0.5f. This higher value will ignore some very small errors, but will allow for some * affect whether the test passes, but substantial distortions introduced by changes in tested
* differences caused by graphics implementations to be ignored. When the difference is close to * components will cause the test to fail.
* the threshold, manually inspect expected/actual bitmaps to confirm failure, as it's possible *
* this is caused by a difference in the codec or graphics implementation as opposed to an issue * <p>When the difference is close to the threshold, manually inspect expected/actual bitmaps to
* in the tested component. * confirm failure, as it's possible this is caused by a difference in the codec or graphics
* implementation as opposed to an issue in the tested component.
*/ */
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 1.f; public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 1.f;
/** /**
* Maximum allowed average pixel difference between the expected and actual edited images in pixel
* difference-based tests, between devices, or devices and emulators.
*
* <p>The value is chosen so that differences in decoder behavior across devices don't affect
* whether the test passes, but substantial distortions introduced by changes in tested components
* will cause the test to fail.
*
* <p>When the difference is close to the threshold, manually inspect expected/actual bitmaps to
* confirm failure, as it's possible this is caused by a difference in the codec or graphics
* implementation as opposed to an issue in the tested component.
*
* <p>This value is larger than {@link #MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} to support the
* larger variance in decoder outputs between different physical devices and emulators.
*/
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE = 5.f;
/**
* Reads a bitmap from the specified asset location. * Reads a bitmap from the specified asset location.
* *
* @param assetString Relative path to the asset within the assets directory. * @param assetString Relative path to the asset within the assets directory.
......
...@@ -28,6 +28,7 @@ import android.graphics.PixelFormat; ...@@ -28,6 +28,7 @@ import android.graphics.PixelFormat;
import android.media.Image; import android.media.Image;
import android.media.ImageReader; import android.media.ImageReader;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi; import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
...@@ -55,6 +56,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -55,6 +56,7 @@ public final class VideoFrameProcessorTestRunner {
private @MonotonicNonNull String testId; private @MonotonicNonNull String testId;
private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory; private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory;
private BitmapReader.@MonotonicNonNull Factory bitmapReaderFactory;
private @MonotonicNonNull String videoAssetPath; private @MonotonicNonNull String videoAssetPath;
private @MonotonicNonNull String outputFileLabel; private @MonotonicNonNull String outputFileLabel;
private @MonotonicNonNull ImmutableList<Effect> effects; private @MonotonicNonNull ImmutableList<Effect> effects;
...@@ -95,6 +97,17 @@ public final class VideoFrameProcessorTestRunner { ...@@ -95,6 +97,17 @@ public final class VideoFrameProcessorTestRunner {
} }
/**
* Sets the {@link BitmapReader.Factory}.
*
* <p>The default value is {@link SurfaceBitmapReader.Factory}.
*/
@CanIgnoreReturnValue
public Builder setBitmapReaderFactory(BitmapReader.Factory bitmapReaderFactory) {
this.bitmapReaderFactory = bitmapReaderFactory;
return this;
}
/**
* Sets the input video asset path. * Sets the input video asset path.
* *
* <p>No default value is set. Must be set when the input is a video file. * <p>No default value is set. Must be set when the input is a video file.
...@@ -203,6 +216,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -203,6 +216,7 @@ public final class VideoFrameProcessorTestRunner {
return new VideoFrameProcessorTestRunner( return new VideoFrameProcessorTestRunner(
testId, testId,
videoFrameProcessorFactory, videoFrameProcessorFactory,
bitmapReaderFactory == null ? new SurfaceBitmapReader.Factory() : bitmapReaderFactory,
videoAssetPath, videoAssetPath,
outputFileLabel == null ? "" : outputFileLabel, outputFileLabel == null ? "" : outputFileLabel,
effects == null ? ImmutableList.of() : effects, effects == null ? ImmutableList.of() : effects,
...@@ -225,15 +239,16 @@ public final class VideoFrameProcessorTestRunner { ...@@ -225,15 +239,16 @@ public final class VideoFrameProcessorTestRunner {
private final String outputFileLabel; private final String outputFileLabel;
private final float pixelWidthHeightRatio; private final float pixelWidthHeightRatio;
private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException; private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
private final VideoFrameProcessor videoFrameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private volatile @MonotonicNonNull ImageReader outputImageReader; private @MonotonicNonNull BitmapReader bitmapReader;
private volatile boolean videoFrameProcessingEnded; private volatile boolean videoFrameProcessingEnded;
private VideoFrameProcessorTestRunner( private VideoFrameProcessorTestRunner(
String testId, String testId,
VideoFrameProcessor.Factory videoFrameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
BitmapReader.Factory bitmapReaderFactory,
@Nullable String videoAssetPath, @Nullable String videoAssetPath,
String outputFileLabel, String outputFileLabel,
ImmutableList<Effect> effects, ImmutableList<Effect> effects,
...@@ -260,15 +275,13 @@ public final class VideoFrameProcessorTestRunner { ...@@ -260,15 +275,13 @@ public final class VideoFrameProcessorTestRunner {
/* releaseFramesAutomatically= */ true, /* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@SuppressLint("WrongConstant")
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
outputImageReader = bitmapReader =
ImageReader.newInstance( bitmapReaderFactory.create(checkNotNull(videoFrameProcessor), width, height);
width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1); Surface outputSurface = bitmapReader.getSurface();
checkNotNull(videoFrameProcessor) videoFrameProcessor.setOutputSurfaceInfo(
.setOutputSurfaceInfo( new SurfaceInfo(outputSurface, width, height));
new SurfaceInfo(outputImageReader.getSurface(), width, height));
} }
@Override @Override
...@@ -289,7 +302,6 @@ public final class VideoFrameProcessorTestRunner { ...@@ -289,7 +302,6 @@ public final class VideoFrameProcessorTestRunner {
}); });
} }
@RequiresApi(19)
public Bitmap processFirstFrameAndEnd() throws Exception { public Bitmap processFirstFrameAndEnd() throws Exception {
DecodeOneFrameUtil.decodeOneAssetFileFrame( DecodeOneFrameUtil.decodeOneAssetFileFrame(
checkNotNull(videoAssetPath), checkNotNull(videoAssetPath),
...@@ -322,19 +334,16 @@ public final class VideoFrameProcessorTestRunner { ...@@ -322,19 +334,16 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
@RequiresApi(19)
public Bitmap endFrameProcessingAndGetImage() throws Exception { public Bitmap endFrameProcessingAndGetImage() throws Exception {
videoFrameProcessor.signalEndOfInput(); videoFrameProcessor.signalEndOfInput();
Thread.sleep(VIDEO_FRAME_PROCESSING_WAIT_MS); Thread.sleep(VIDEO_FRAME_PROCESSING_WAIT_MS);
assertThat(videoFrameProcessingEnded).isTrue();
assertThat(videoFrameProcessingException.get()).isNull(); assertThat(videoFrameProcessingException.get()).isNull();
assertThat(videoFrameProcessingEnded).isTrue();
Image videoFrameProcessorOutputImage = checkNotNull(outputImageReader).acquireLatestImage(); Bitmap outputBitmap = checkNotNull(bitmapReader).getBitmap();
Bitmap actualBitmap = createArgb8888BitmapFromRgba8888Image(videoFrameProcessorOutputImage); maybeSaveTestBitmap(testId, /* bitmapLabel= */ outputFileLabel, outputBitmap, /* path= */ null);
videoFrameProcessorOutputImage.close(); return outputBitmap;
maybeSaveTestBitmap(testId, /* bitmapLabel= */ outputFileLabel, actualBitmap, /* path= */ null);
return actualBitmap;
} }
public void release() { public void release() {
...@@ -346,4 +355,56 @@ public final class VideoFrameProcessorTestRunner { ...@@ -346,4 +355,56 @@ public final class VideoFrameProcessorTestRunner {
public interface OnOutputFrameAvailableListener { public interface OnOutputFrameAvailableListener {
void onFrameAvailable(long presentationTimeUs); void onFrameAvailable(long presentationTimeUs);
} }
/**
 * Reads a {@link Bitmap} from {@link VideoFrameProcessor} output.
 *
 * <p>Implementations supply the output {@link Surface} that the {@link VideoFrameProcessor}
 * renders to, and read rendered output back as a {@link Bitmap}.
 */
public interface BitmapReader {
  /** A factory for {@link BitmapReader} instances. */
  interface Factory {
    /**
     * Creates a {@link BitmapReader} for the given {@code videoFrameProcessor}, sized to read
     * output of the given {@code width} and {@code height}, in pixels.
     */
    BitmapReader create(VideoFrameProcessor videoFrameProcessor, int width, int height);
  }
  /** Returns the {@link VideoFrameProcessor} output {@link Surface}. */
  Surface getSurface();
  /** Returns the output {@link Bitmap}. */
  Bitmap getBitmap();
}
/**
 * A {@link BitmapReader} that reads from a {@link Surface}, backed by an {@link ImageReader}.
 *
 * <p>Only supports SDR input, because {@link ImageReader} only supports SDR.
 */
public static final class SurfaceBitmapReader
    implements VideoFrameProcessorTestRunner.BitmapReader {

  /** A factory for {@link SurfaceBitmapReader} instances. */
  public static final class Factory
      implements VideoFrameProcessorTestRunner.BitmapReader.Factory {
    @Override
    public SurfaceBitmapReader create(
        VideoFrameProcessor videoFrameProcessor, int width, int height) {
      // The videoFrameProcessor argument is unused: reading happens via the ImageReader's
      // Surface, which the test runner passes back to the processor as the output surface.
      return new SurfaceBitmapReader(width, height);
    }
  }

  // ImageReader only supports SDR input.
  private final ImageReader imageReader;

  @SuppressLint("WrongConstant")
  private SurfaceBitmapReader(int width, int height) {
    imageReader =
        ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
  }

  @Override
  public Surface getSurface() {
    return imageReader.getSurface();
  }

  @Override
  public Bitmap getBitmap() {
    // acquireLatestImage() returns null if no frame has been rendered to the Surface yet;
    // fail fast here with a clear error rather than an NPE inside the bitmap conversion.
    Image outputImage = checkNotNull(imageReader.acquireLatestImage());
    Bitmap outputBitmap = createArgb8888BitmapFromRgba8888Image(outputImage);
    outputImage.close();
    return outputBitmap;
  }
}
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment