Commit e20cace6 by huangdarwin Committed by Tofunmi Adigun-Hameed

Effect: Add multiple texture output test.

Confirms that multiple textures can be output, and that timestamps and pixels
are as expected.

PiperOrigin-RevId: 538459296
(cherry picked from commit cfcc53b3829f5dedbd4a2e96af87f3b05dec7715)
parent 39de7a64
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer.mh;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import java.util.Set;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Tests for {@link DefaultVideoFrameProcessor} texture output.
 *
 * <p>Confirms that the output timestamps are correct for each frame, and that the output pixels are
 * correct for the first frame of each bitmap.
 */
@RunWith(AndroidJUnit4.class)
public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest {
  private static final String ORIGINAL_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
  private static final String MEDIA3_TEST_PNG_ASSET_PATH =
      "media/bitmap/input_images/media3test.png";
  private static final String SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_original.png";
  private static final String SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/srgb_to_electrical_media3test.png";

  private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
  private @MonotonicNonNull TextureBitmapReader textureBitmapReader;

  @After
  public void release() {
    checkNotNull(videoFrameProcessorTestRunner).release();
  }

  @Test
  public void textureOutput_queueBitmap_matchesGoldenFile() throws Exception {
    String testId = "textureOutput_queueBitmap_matchesGoldenFile";
    videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build();
    long offsetUs = 1_000_000L;

    // One bitmap spanning 3 seconds at 1 fps should produce 3 output frames.
    videoFrameProcessorTestRunner.queueInputBitmap(
        readBitmap(ORIGINAL_PNG_ASSET_PATH),
        /* durationUs= */ 3 * C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ offsetUs,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();

    Set<Long> outputTimestamps = checkNotNull(textureBitmapReader).getOutputTimestamps();
    assertThat(outputTimestamps)
        .containsExactly(
            offsetUs, offsetUs + C.MICROS_PER_SECOND, offsetUs + 2 * C.MICROS_PER_SECOND);
    Bitmap actualBitmap = checkNotNull(textureBitmapReader).getBitmap(offsetUs);
    assertBitmapMatchesGoldenFile(
        testId,
        /* bitmapLabel= */ "actual",
        actualBitmap,
        SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH);
  }

  @Test
  public void textureOutput_queueTwoBitmaps_matchesGoldenFiles() throws Exception {
    String testId = "textureOutput_queueTwoBitmaps_matchesGoldenFiles";
    videoFrameProcessorTestRunner = getFrameProcessorTestRunnerBuilder(testId).build();

    // First bitmap: 1 second at 2 fps -> 2 frames.
    long offsetUs1 = 1_000_000L;
    videoFrameProcessorTestRunner.queueInputBitmap(
        readBitmap(ORIGINAL_PNG_ASSET_PATH),
        /* durationUs= */ C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ offsetUs1,
        /* frameRate= */ 2);
    // Second bitmap: 3 seconds at 1 fps -> 3 frames.
    long offsetUs2 = 2_000_000L;
    videoFrameProcessorTestRunner.queueInputBitmap(
        readBitmap(MEDIA3_TEST_PNG_ASSET_PATH),
        /* durationUs= */ 3 * C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ offsetUs2,
        /* frameRate= */ 1);
    videoFrameProcessorTestRunner.endFrameProcessing();

    Set<Long> outputTimestamps = checkNotNull(textureBitmapReader).getOutputTimestamps();
    assertThat(outputTimestamps)
        .containsExactly(
            offsetUs1,
            offsetUs1 + C.MICROS_PER_SECOND / 2,
            offsetUs2,
            offsetUs2 + C.MICROS_PER_SECOND,
            offsetUs2 + 2 * C.MICROS_PER_SECOND);
    // Pixel-check the first frame output for each input bitmap.
    Bitmap actualBitmap1 = checkNotNull(textureBitmapReader).getBitmap(offsetUs1);
    assertBitmapMatchesGoldenFile(
        testId,
        /* bitmapLabel= */ "actual1",
        actualBitmap1,
        SRGB_TO_ELECTRICAL_ORIGINAL_PNG_ASSET_PATH);
    Bitmap actualBitmap2 = checkNotNull(textureBitmapReader).getBitmap(offsetUs2);
    assertBitmapMatchesGoldenFile(
        testId,
        /* bitmapLabel= */ "actual2",
        actualBitmap2,
        SRGB_TO_ELECTRICAL_MEDIA3_TEST_PNG_ASSET_PATH);
  }

  /**
   * Saves {@code actualBitmap} for manual inspection, then asserts that its average per-pixel
   * absolute difference from the golden asset at {@code goldenAssetPath} is within the
   * cross-device tolerance.
   */
  private static void assertBitmapMatchesGoldenFile(
      String testId, String bitmapLabel, Bitmap actualBitmap, String goldenAssetPath)
      throws Exception {
    maybeSaveTestBitmap(testId, bitmapLabel, actualBitmap, /* path= */ null);
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888(
            readBitmap(goldenAssetPath), actualBitmap, testId);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
  }

  /**
   * Returns a {@link VideoFrameProcessorTestRunner.Builder} configured for bitmap input whose
   * output is captured by a new {@link TextureBitmapReader} (also assigned to
   * {@link #textureBitmapReader}).
   */
  private VideoFrameProcessorTestRunner.Builder getFrameProcessorTestRunnerBuilder(String testId) {
    textureBitmapReader = new TextureBitmapReader();
    VideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setTextureOutput(
                textureBitmapReader::readBitmapFromTexture, /* textureOutputCapacity= */ 1)
            .build();
    return new VideoFrameProcessorTestRunner.Builder()
        .setTestId(testId)
        .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
        .setInputType(INPUT_TYPE_BITMAP)
        .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
        .setBitmapReader(textureBitmapReader);
  }
}
@@ -26,14 +26,11 @@ import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSE
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.recordTestSkipped;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.video.ColorInfo.SDR_BT709_LIMITED;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.Surface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.effect.BitmapOverlay;
@@ -45,21 +42,18 @@ import com.google.android.exoplayer2.effect.OverlayEffect;
import com.google.android.exoplayer2.effect.ScaleAndRotateTransformation;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner.BitmapReader;
import com.google.android.exoplayer2.transformer.AndroidTestUtil;
import com.google.android.exoplayer2.transformer.EncoderUtil;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -579,57 +573,6 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
        .setBitmapReader(textureBitmapReader);
  }
/**
* {@inheritDoc}
*
* <p>Reads from an OpenGL texture. Only for use on physical devices.
*/
private static final class TextureBitmapReader implements BitmapReader {
// TODO(b/239172735): This outputs an incorrect black output image on emulators.
private boolean useHighPrecisionColorComponents;
private @MonotonicNonNull Bitmap outputBitmap;
@Nullable
@Override
public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) {
this.useHighPrecisionColorComponents = useHighPrecisionColorComponents;
return null;
}
@Override
public Bitmap getBitmap() {
return checkStateNotNull(outputBitmap);
}
public void readBitmapFromTexture(
GlTextureInfo outputTexture,
long presentationTimeUs,
DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
throws VideoFrameProcessingException {
try {
GlUtil.focusFramebufferUsingCurrentContext(
outputTexture.getFboId(), outputTexture.getWidth(), outputTexture.getHeight());
outputBitmap =
createBitmapFromCurrentGlFrameBuffer(
outputTexture.getWidth(),
outputTexture.getHeight(),
useHighPrecisionColorComponents);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
releaseOutputTextureCallback.release(presentationTimeUs);
}
private static Bitmap createBitmapFromCurrentGlFrameBuffer(
int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException {
if (!useHighPrecisionColorComponents) {
return BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
}
checkState(Util.SDK_INT > 26, "useHighPrecisionColorComponents only supported on API 26+");
return BitmapPixelTestUtil.createFp16BitmapFromCurrentGlFramebuffer(width, height);
}
}
  private static boolean deviceSupportsHdrEditing(Format format) {
    return !EncoderUtil.getSupportedEncodersForHdrEditing(
            checkNotNull(checkNotNull(format).sampleMimeType), format.colorInfo)
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer.mh;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.graphics.Bitmap;
import android.view.Surface;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * A {@link VideoFrameProcessorTestRunner.BitmapReader} that reads output frames back from an
 * OpenGL texture, keyed by presentation time.
 *
 * <p>Only for use on physical devices.
 */
public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.BitmapReader {
  // TODO(b/239172735): This outputs an incorrect black output image on emulators.

  // Maps each output frame's presentation time (microseconds) to the bitmap read for that frame.
  // ConcurrentHashMap — NOTE(review): presumably frames are written from the GL thread while
  // tests read from the test thread; confirm against DefaultVideoFrameProcessor's callback thread.
  private final Map<Long, Bitmap> outputTimestampsToBitmaps;
  // Recorded from getSurface(); selects ARGB_8888 vs FP16 readback in readBitmapFromTexture.
  private boolean useHighPrecisionColorComponents;
  // The most recently read output frame; unset until readBitmapFromTexture is first called.
  private @MonotonicNonNull Bitmap outputBitmap;

  public TextureBitmapReader() {
    outputTimestampsToBitmaps = new ConcurrentHashMap<>();
  }

  /**
   * {@inheritDoc}
   *
   * <p>Returns null: this reader reads directly from the output texture, so no output {@link
   * Surface} is needed. Only the {@code useHighPrecisionColorComponents} flag is recorded.
   */
  @Nullable
  @Override
  public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) {
    this.useHighPrecisionColorComponents = useHighPrecisionColorComponents;
    return null;
  }

  /** Returns the most recently read bitmap; throws if no frame has been read yet. */
  @Override
  public Bitmap getBitmap() {
    return checkStateNotNull(outputBitmap);
  }

  /**
   * Returns the bitmap read for the frame with the given presentation time, in microseconds;
   * throws if no frame with that exact timestamp has been read.
   */
  public Bitmap getBitmap(long presentationTimeUs) {
    return checkStateNotNull(outputTimestampsToBitmaps.get(presentationTimeUs));
  }

  /** Returns the presentation times (in microseconds) of all frames read so far. */
  public Set<Long> getOutputTimestamps() {
    return outputTimestampsToBitmaps.keySet();
  }

  /**
   * Reads the given output texture into a bitmap, records it under {@code presentationTimeUs},
   * and releases the texture back to the frame processor.
   *
   * @throws VideoFrameProcessingException If focusing or reading the framebuffer fails.
   */
  public void readBitmapFromTexture(
      GlTextureInfo outputTexture,
      long presentationTimeUs,
      DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
      throws VideoFrameProcessingException {
    try {
      GlUtil.focusFramebufferUsingCurrentContext(
          outputTexture.getFboId(), outputTexture.getWidth(), outputTexture.getHeight());
      outputBitmap =
          createBitmapFromCurrentGlFrameBuffer(
              outputTexture.getWidth(), outputTexture.getHeight(), useHighPrecisionColorComponents);
    } catch (GlUtil.GlException e) {
      throw new VideoFrameProcessingException(e);
    }
    outputTimestampsToBitmaps.put(presentationTimeUs, outputBitmap);
    // Hand the texture back so the frame processor can reuse it.
    releaseOutputTextureCallback.release(presentationTimeUs);
  }

  /** Copies the currently focused GL framebuffer into a newly allocated bitmap. */
  private static Bitmap createBitmapFromCurrentGlFrameBuffer(
      int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException {
    if (!useHighPrecisionColorComponents) {
      return BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
    }
    // NOTE(review): the check requires SDK_INT > 26 (API 27+) but the message says "API 26+" —
    // confirm the intended bound (RGBA_F16 bitmaps were introduced in API 26).
    checkState(Util.SDK_INT > 26, "useHighPrecisionColorComponents only supported on API 26+");
    return BitmapPixelTestUtil.createFp16BitmapFromCurrentGlFramebuffer(width, height);
  }
}
@@ -366,10 +366,12 @@ public final class VideoFrameProcessorTestRunner {
    videoFrameProcessor.queueInputTexture(inputTexture.getTexId(), pts);
  }

  /** {@link #endFrameProcessing(long)} with {@link #VIDEO_FRAME_PROCESSING_WAIT_MS} applied. */
  public void endFrameProcessing() throws InterruptedException {
    endFrameProcessing(VIDEO_FRAME_PROCESSING_WAIT_MS);
  }

  /** Have the {@link VideoFrameProcessor} finish processing. */
  public void endFrameProcessing(long videoFrameProcessingWaitTime) throws InterruptedException {
    videoFrameProcessor.signalEndOfInput();
    Thread.sleep(videoFrameProcessingWaitTime);
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment