Commit 4957980c by tofunmi Committed by Tofunmi Adigun-Hameed

Effect: Support input via texture ID

PiperOrigin-RevId: 530624195
parent 56c070a6
...@@ -386,6 +386,11 @@ public final class GlUtil { ...@@ -386,6 +386,11 @@ public final class GlUtil {
return eglSurface; return eglSurface;
} }
/** Gets the current {@link EGLContext context}. */
public static EGLContext getCurrentContext() {
return EGL14.eglGetCurrentContext();
}
/** /**
* Collects all OpenGL errors that occurred since this method was last called and throws a {@link * Collects all OpenGL errors that occurred since this method was last called and throws a {@link
* GlException} with the combined error message. * GlException} with the combined error message.
......
...@@ -45,6 +45,14 @@ import java.util.concurrent.Executor; ...@@ -45,6 +45,14 @@ import java.util.concurrent.Executor;
*/ */
public interface VideoFrameProcessor { public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced. // TODO(b/243036513): Allow effects to be replaced.
/** A listener for frame processing events. */
public interface OnInputFrameProcessedListener {
/** Called when the given input frame has been processed. */
void onInputFrameProcessed(int textureId) throws VideoFrameProcessingException;
}
/** /**
* Specifies how the input frames are made available to the {@link VideoFrameProcessor}. One of * Specifies how the input frames are made available to the {@link VideoFrameProcessor}. One of
* {@link #INPUT_TYPE_SURFACE}, {@link #INPUT_TYPE_BITMAP} or {@link #INPUT_TYPE_TEXTURE_ID}. * {@link #INPUT_TYPE_SURFACE}, {@link #INPUT_TYPE_BITMAP} or {@link #INPUT_TYPE_TEXTURE_ID}.
...@@ -165,6 +173,28 @@ public interface VideoFrameProcessor { ...@@ -165,6 +173,28 @@ public interface VideoFrameProcessor {
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate); void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/** /**
* Provides an input texture ID to the {@code VideoFrameProcessor}.
*
* <p>It must be called after the {@link #setOnInputFrameProcessedListener
* onInputFrameProcessedListener} and the {@link #setInputFrameInfo frameInfo} have been set.
*
* <p>Can be called on any thread.
*
* @param textureId The ID of the texture queued to the {@code VideoFrameProcessor}.
* @param presentationTimeUs The presentation time of the queued texture, in microseconds.
*/
void queueInputTexture(int textureId, long presentationTimeUs);
/**
* Sets the {@link OnInputFrameProcessedListener}.
*
* <p>Can be called on any thread.
*
* @param listener The {@link OnInputFrameProcessedListener}.
*/
void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener);
/**
* Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames * Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames
* from. * from.
* *
...@@ -188,7 +218,7 @@ public interface VideoFrameProcessor { ...@@ -188,7 +218,7 @@ public interface VideoFrameProcessor {
* Sets information about the input frames. * Sets information about the input frames.
* *
* <p>The new input information is applied from the next frame {@linkplain #registerInputFrame() * <p>The new input information is applied from the next frame {@linkplain #registerInputFrame()
* registered} onwards. * registered} or {@linkplain #queueInputTexture queued} onwards.
* *
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output * <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1. * frames' pixels have a ratio of 1.
......
...@@ -45,6 +45,7 @@ import com.google.android.exoplayer2.util.SurfaceInfo; ...@@ -45,6 +45,7 @@ import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
...@@ -70,7 +71,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -70,7 +71,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public interface TextureOutputListener { public interface TextureOutputListener {
/** Called when a texture has been rendered to. */ /** Called when a texture has been rendered to. */
void onTextureRendered(GlTextureInfo outputTexture, long presentationTimeUs) void onTextureRendered(GlTextureInfo outputTexture, long presentationTimeUs)
throws GlUtil.GlException; throws GlUtil.GlException, VideoFrameProcessingException;
} }
/** A factory for {@link DefaultVideoFrameProcessor} instances. */ /** A factory for {@link DefaultVideoFrameProcessor} instances. */
...@@ -134,7 +135,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -134,7 +135,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
private final boolean enableColorTransfers; private final boolean enableColorTransfers;
private final GlObjectsProvider glObjectsProvider; private final GlObjectsProvider glObjectsProvider;
@Nullable private final TextureOutputListener textureOutputListener; @Nullable private final TextureOutputListener textureOutputListener;
...@@ -348,6 +348,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -348,6 +348,16 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
@Override @Override
public void queueInputTexture(int textureId, long presentationTimeUs) {
checkNotNull(textureManager).queueInputTexture(textureId, presentationTimeUs);
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
checkNotNull(textureManager).setOnInputFrameProcessedListener(listener);
}
@Override
public Surface getInputSurface() { public Surface getInputSurface() {
return checkNotNull(textureManager).getInputSurface(); return checkNotNull(textureManager).getInputSurface();
} }
...@@ -380,6 +390,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -380,6 +390,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override @Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) { public void setInputFrameInfo(FrameInfo inputFrameInfo) {
nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo); nextInputFrameInfo = adjustForPixelWidthHeightRatio(inputFrameInfo);
checkNotNull(textureManager).setInputFrameInfo(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = true; hasRefreshedNextInputFrameInfo = true;
} }
...@@ -562,7 +573,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -562,7 +573,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
// HDR bitmaps are not supported. // HDR bitmaps are not supported.
inputSwitcher.registerInput(INPUT_TYPE_BITMAP); inputSwitcher.registerInput(INPUT_TYPE_BITMAP);
} }
if (inputColorInfo.colorTransfer != C.COLOR_TRANSFER_SRGB) {
// Image and textureId concatenation not supported.
inputSwitcher.registerInput(INPUT_TYPE_TEXTURE_ID);
}
inputSwitcher.setDownstreamShaderProgram(effectsShaderPrograms.get(0)); inputSwitcher.setDownstreamShaderProgram(effectsShaderPrograms.get(0));
setGlObjectProviderOnShaderPrograms(effectsShaderPrograms, glObjectsProvider); setGlObjectProviderOnShaderPrograms(effectsShaderPrograms, glObjectsProvider);
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.effect;
import android.util.Pair;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.ArrayDeque;
import java.util.Queue;
/**
 * Manages queueing frames and sending them to a given {@link GlShaderProgram
 * consumingGLShaderProgram} at a consumable pace.
 *
 * <p>Frames are stored as a {@link GlTextureInfo} with a {@code presentationTimeUs}.
 *
 * <p>All public methods are synchronized, so they may be called from any thread; actual shader
 * program calls are forwarded to the {@link VideoFrameProcessingTaskExecutor}.
 */
// TODO(b/261820382): Converge ChainingGlShaderProgramListener with this class.
/* package */ final class FrameConsumptionManager implements GlShaderProgram.InputListener {
  private final GlShaderProgram consumingGlShaderProgram;
  private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;

  // Frames queued while the consuming shader program has no spare capacity, in arrival order.
  @GuardedBy("this")
  private final Queue<Pair<GlTextureInfo, Long>> availableFrames;

  // Number of frames the consuming shader program is currently ready to accept.
  @GuardedBy("this")
  private int consumingGlShaderProgramInputCapacity;

  /**
   * Creates a new instance.
   *
   * @param consumingGlShaderProgram The {@link GlShaderProgram} for which this {@code
   *     FrameConsumptionManager} will be set as the {@link GlShaderProgram.InputListener}.
   * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
   */
  public FrameConsumptionManager(
      GlShaderProgram consumingGlShaderProgram,
      VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
    this.consumingGlShaderProgram = consumingGlShaderProgram;
    this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
    availableFrames = new ArrayDeque<>();
  }

  @Override
  public synchronized void onReadyToAcceptInputFrame() {
    @Nullable Pair<GlTextureInfo, Long> pendingFrame = availableFrames.poll();
    if (pendingFrame == null) {
      // No frame is waiting, so bank the capacity for a future queueInputFrame call.
      consumingGlShaderProgramInputCapacity++;
      return;
    }

    long presentationTimeUs = pendingFrame.second;
    if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
      // Sentinel frame appended by signalEndOfCurrentStream: translate it into an
      // end-of-stream signal now that all preceding frames have been queued.
      videoFrameProcessingTaskExecutor.submit(
          consumingGlShaderProgram::signalEndOfCurrentInputStream);
    } else {
      videoFrameProcessingTaskExecutor.submit(
          () ->
              consumingGlShaderProgram.queueInputFrame(
                  /* inputTexture= */ pendingFrame.first, presentationTimeUs));
    }
  }

  @Override
  public synchronized void onFlush() {
    // Drop pending frames and reset capacity; the consumer reports fresh capacity after a flush.
    consumingGlShaderProgramInputCapacity = 0;
    availableFrames.clear();
  }

  /**
   * Queues the given frame, forwarding it immediately if the consuming {@link GlShaderProgram} has
   * reported spare capacity, and storing it for later delivery otherwise.
   */
  public synchronized void queueInputFrame(GlTextureInfo texture, long presentationTimeUs) {
    if (consumingGlShaderProgramInputCapacity > 0) {
      videoFrameProcessingTaskExecutor.submit(
          () ->
              consumingGlShaderProgram.queueInputFrame(
                  /* inputTexture= */ texture, presentationTimeUs));
      consumingGlShaderProgramInputCapacity--;
    } else {
      availableFrames.add(Pair.create(texture, presentationTimeUs));
    }
  }

  /**
   * Notifies the {@link GlShaderProgram consumingGlShaderProgram} that the current input stream is
   * finished once all the pending frames are queued.
   */
  public synchronized void signalEndOfCurrentStream() {
    if (!availableFrames.isEmpty()) {
      // Frames are still pending: append a sentinel that onReadyToAcceptInputFrame converts into
      // signalEndOfCurrentInputStream once the queue drains.
      availableFrames.add(Pair.create(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
    } else {
      videoFrameProcessingTaskExecutor.submit(
          consumingGlShaderProgram::signalEndOfCurrentInputStream);
    }
  }

  /** See {@link VideoFrameProcessor#getPendingInputFrameCount}. */
  public synchronized int getPendingFrameCount() {
    return availableFrames.size();
  }
}
...@@ -115,7 +115,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -115,7 +115,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
new BitmapTextureManager(samplingShaderProgram, videoFrameProcessingTaskExecutor); new BitmapTextureManager(samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram)); inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break; break;
case VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID: // fall through case VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID:
samplingShaderProgram =
DefaultShaderProgram.createWithInternalSampler(
context,
/* matrixTransformations= */ ImmutableList.of(),
/* rgbMatrices= */ ImmutableList.of(),
inputColorInfo,
outputColorInfo,
enableColorTransfers,
inputType);
samplingShaderProgram.setGlObjectsProvider(glObjectsProvider);
textureManager =
new TexIdTextureManager(samplingShaderProgram, videoFrameProcessingTaskExecutor);
inputs.put(inputType, new Input(textureManager, samplingShaderProgram));
break;
default: default:
throw new VideoFrameProcessingException("Unsupported input type " + inputType); throw new VideoFrameProcessingException("Unsupported input type " + inputType);
} }
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import android.opengl.GLES10;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
 * Forwards video frames made available via a {@linkplain GLES10#GL_TEXTURE_2D traditional GLES
 * texture} to a {@link GlShaderProgram} for consumption.
 *
 * <p>Public methods in this class can be called from any thread.
 */
/* package */ final class TexIdTextureManager implements TextureManager {
  private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
  // Paces delivery of queued textures to the sampling shader program.
  private final FrameConsumptionManager frameConsumptionManager;

  // Must be set via setOnInputFrameProcessedListener before the first queueInputTexture call.
  private @MonotonicNonNull OnInputFrameProcessedListener frameProcessedListener;
  // Width/height of subsequently queued textures; must be set via setInputFrameInfo before the
  // first queueInputTexture call.
  private @MonotonicNonNull FrameInfo inputFrameInfo;

  /**
   * Creates a new instance.
   *
   * @param shaderProgram The {@link GlShaderProgram} for which this {@code texIdTextureManager}
   *     will be set as the {@link GlShaderProgram.InputListener}.
   * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
   */
  public TexIdTextureManager(
      GlShaderProgram shaderProgram,
      VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
    this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
    frameConsumptionManager =
        new FrameConsumptionManager(shaderProgram, videoFrameProcessingTaskExecutor);
  }

  @Override
  public void onReadyToAcceptInputFrame() {
    videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::onReadyToAcceptInputFrame);
  }

  @Override
  public void onInputFrameProcessed(GlTextureInfo inputTexture) {
    // Notify the externally-set listener that this texture ID has been processed.
    videoFrameProcessingTaskExecutor.submit(
        () -> checkNotNull(frameProcessedListener).onInputFrameProcessed(inputTexture.texId));
  }

  @Override
  public void onFlush() {
    videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::onFlush);
  }

  @Override
  public void queueInputTexture(int inputTexId, long presentationTimeUs) {
    // Fail fast if the required setup calls have not been made yet.
    FrameInfo frameInfo = checkNotNull(this.inputFrameInfo);
    checkNotNull(frameProcessedListener);
    videoFrameProcessingTaskExecutor.submit(
        () -> {
          // Wrap the caller-owned texture ID; no framebuffer or renderbuffer is attached.
          GlTextureInfo inputTexture =
              new GlTextureInfo(
                  inputTexId,
                  /* fboId= */ C.INDEX_UNSET,
                  /* rboId= */ C.INDEX_UNSET,
                  frameInfo.width,
                  frameInfo.height);
          frameConsumptionManager.queueInputFrame(inputTexture, presentationTimeUs);
        });
  }

  @Override
  public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
    frameProcessedListener = listener;
  }

  @Override
  public void setInputFrameInfo(FrameInfo inputFrameInfo) {
    this.inputFrameInfo = inputFrameInfo;
  }

  @Override
  public int getPendingFrameCount() {
    return frameConsumptionManager.getPendingFrameCount();
  }

  @Override
  public void signalEndOfCurrentInputStream() {
    videoFrameProcessingTaskExecutor.submit(frameConsumptionManager::signalEndOfCurrentStream);
  }

  @Override
  public void signalEndOfInput() {
    // Do nothing: end-of-stream is signaled per input stream via signalEndOfCurrentInputStream.
  }

  @Override
  public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
    // Do nothing.
  }

  @Override
  public void release() {
    // Do nothing: this manager does not own the input textures it is given.
  }
}
...@@ -23,6 +23,7 @@ import android.view.Surface; ...@@ -23,6 +23,7 @@ import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
/** A component that handles {@code DefaultVideoFrameProcessor}'s input. */ /** A component that handles {@code DefaultVideoFrameProcessor}'s input. */
/* package */ interface TextureManager extends GlShaderProgram.InputListener { /* package */ interface TextureManager extends GlShaderProgram.InputListener {
...@@ -53,6 +54,33 @@ import com.google.android.exoplayer2.util.VideoFrameProcessor; ...@@ -53,6 +54,33 @@ import com.google.android.exoplayer2.util.VideoFrameProcessor;
} }
/** /**
* Provides an input texture ID to the {@code VideoFrameProcessor}.
*
* @see VideoFrameProcessor#queueInputTexture
*/
default void queueInputTexture(int inputTexId, long presentationTimeUs) {
throw new UnsupportedOperationException();
}
/**
* Sets the {@link OnInputFrameProcessedListener}.
*
* @see VideoFrameProcessor#setOnInputFrameProcessedListener
*/
default void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
throw new UnsupportedOperationException();
}
/**
* Sets information about the input frames.
*
* @see VideoFrameProcessor#setInputFrameInfo
*/
default void setInputFrameInfo(FrameInfo inputFrameInfo) {
// Do nothing.
}
/**
* See {@link VideoFrameProcessor#getInputSurface}. * See {@link VideoFrameProcessor#getInputSurface}.
* *
* <p>Only works when the input is received on a {@link SurfaceTexture}. * <p>Only works when the input is received on a {@link SurfaceTexture}.
......
...@@ -20,6 +20,7 @@ import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM ...@@ -20,6 +20,7 @@ import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap;
import static com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner.VIDEO_FRAME_PROCESSING_WAIT_MS;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_FORMAT; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
...@@ -35,6 +36,7 @@ import android.view.Surface; ...@@ -35,6 +36,7 @@ import android.view.Surface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.effect.BitmapOverlay; import com.google.android.exoplayer2.effect.BitmapOverlay;
import com.google.android.exoplayer2.effect.DefaultGlObjectsProvider;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.GlEffect; import com.google.android.exoplayer2.effect.GlEffect;
import com.google.android.exoplayer2.effect.GlShaderProgram; import com.google.android.exoplayer2.effect.GlShaderProgram;
...@@ -42,12 +44,15 @@ import com.google.android.exoplayer2.effect.OverlayEffect; ...@@ -42,12 +44,15 @@ import com.google.android.exoplayer2.effect.OverlayEffect;
import com.google.android.exoplayer2.effect.ScaleAndRotateTransformation; import com.google.android.exoplayer2.effect.ScaleAndRotateTransformation;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner; import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner.BitmapReader;
import com.google.android.exoplayer2.transformer.AndroidTestUtil; import com.google.android.exoplayer2.transformer.AndroidTestUtil;
import com.google.android.exoplayer2.transformer.EncoderUtil; import com.google.android.exoplayer2.transformer.EncoderUtil;
import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlTextureInfo; import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
...@@ -120,6 +125,45 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -120,6 +125,45 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
} }
@Test @Test
public void noEffects_textureInput_matchesGoldenFile() throws Exception {
String testId = "noEffects_textureInput_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
TextureBitmapReader producersBitmapReader = new TextureBitmapReader();
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(
(outputTexture, presentationTimeUs) ->
inputTextureIntoVideoFrameProcessor(
testId, consumersBitmapReader, outputTexture, presentationTimeUs))
.build();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(producersBitmapReader)
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
texIdProducingVideoFrameProcessorTestRunner.processFirstFrameAndEnd();
texIdProducingVideoFrameProcessorTestRunner.release();
Bitmap actualBitmap = consumersBitmapReader.getBitmap();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
public void bitmapOverlay_matchesGoldenFile() throws Exception { public void bitmapOverlay_matchesGoldenFile() throws Exception {
String testId = "bitmapOverlay_matchesGoldenFile"; String testId = "bitmapOverlay_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported( if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
...@@ -147,6 +191,48 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -147,6 +191,48 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
} }
@Test @Test
public void bitmapOverlay_textureInput_matchesGoldenFile() throws Exception {
String testId = "bitmapOverlay_textureInput_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
TextureBitmapReader producersBitmapReader = new TextureBitmapReader();
TextureBitmapReader consumersBitmapReader = new TextureBitmapReader();
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(
(outputTexture, presentationTimeUs) ->
inputTextureIntoVideoFrameProcessor(
testId, consumersBitmapReader, outputTexture, presentationTimeUs))
.build();
VideoFrameProcessorTestRunner texIdProducingVideoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(producersBitmapReader)
.setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
.build();
texIdProducingVideoFrameProcessorTestRunner.processFirstFrameAndEnd();
texIdProducingVideoFrameProcessorTestRunner.release();
Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);
Bitmap actualBitmap = consumersBitmapReader.getBitmap();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
}
@Test
public void noEffects_hlg10Input_matchesGoldenFile() throws Exception { public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
String testId = "noEffects_hlg10Input_matchesGoldenFile"; String testId = "noEffects_hlg10Input_matchesGoldenFile";
Context context = getApplicationContext(); Context context = getApplicationContext();
...@@ -284,6 +370,36 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -284,6 +370,36 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16); .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
} }
private void inputTextureIntoVideoFrameProcessor(
String testId,
TextureBitmapReader bitmapReader,
GlTextureInfo texture,
long presentationTimeUs)
throws VideoFrameProcessingException {
GlObjectsProvider contextSharingGlObjectsProvider =
new DefaultGlObjectsProvider(GlUtil.getCurrentContext());
DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setOnTextureRenderedListener(bitmapReader::readBitmapFromTexture)
.setGlObjectsProvider(contextSharingGlObjectsProvider)
.build();
videoFrameProcessorTestRunner =
new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
.setBitmapReader(bitmapReader)
.setInputType(VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID)
.build();
videoFrameProcessorTestRunner.queueInputTexture(texture, presentationTimeUs);
try {
videoFrameProcessorTestRunner.endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS / 2);
} catch (InterruptedException e) {
throw new VideoFrameProcessingException(e);
}
}
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
TextureBitmapReader textureBitmapReader = new TextureBitmapReader(); TextureBitmapReader textureBitmapReader = new TextureBitmapReader();
...@@ -303,8 +419,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -303,8 +419,7 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
* *
* <p>Reads from an OpenGL texture. Only for use on physical devices. * <p>Reads from an OpenGL texture. Only for use on physical devices.
*/ */
private static final class TextureBitmapReader private static final class TextureBitmapReader implements BitmapReader {
implements VideoFrameProcessorTestRunner.BitmapReader {
// TODO(b/239172735): This outputs an incorrect black output image on emulators. // TODO(b/239172735): This outputs an incorrect black output image on emulators.
private boolean useHighPrecisionColorComponents; private boolean useHighPrecisionColorComponents;
......
...@@ -22,6 +22,7 @@ import static com.google.android.exoplayer2.util.Assertions.checkNotNull; ...@@ -22,6 +22,7 @@ import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP; import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE; import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
...@@ -36,6 +37,8 @@ import androidx.annotation.RequiresApi; ...@@ -36,6 +37,8 @@ import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.util.DebugViewProvider; import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo; import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.SurfaceInfo; import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
...@@ -349,6 +352,23 @@ public final class VideoFrameProcessorTestRunner { ...@@ -349,6 +352,23 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
public void queueInputTexture(GlTextureInfo inputTexture, long pts) {
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputTexture.width, inputTexture.height)
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
videoFrameProcessor.registerInputStream(INPUT_TYPE_TEXTURE_ID);
videoFrameProcessor.setOnInputFrameProcessedListener(
texId -> {
try {
GlUtil.deleteTexture(texId);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
});
videoFrameProcessor.queueInputTexture(inputTexture.texId, pts);
}
public void endFrameProcessing() throws InterruptedException { public void endFrameProcessing() throws InterruptedException {
endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS); endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment