Commit fd9beb68 by huangdarwin Committed by Rohit Singh

HDR: Add HDR pixel tests.

Implement HDR input support for texture output, and add HDR pixel tests.

PiperOrigin-RevId: 523417701
parent b743ad9f
...@@ -97,7 +97,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor { ...@@ -97,7 +97,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** /**
* Sets the {@link TextureOutputListener}. * Sets the {@link TextureOutputListener}.
* *
* <p>If set, the {@link VideoFrameProcessor} will output to an OpenGL texture. * <p>If set, the {@link VideoFrameProcessor} will output to an OpenGL texture, accessible via
* {@link TextureOutputListener#onTextureRendered}. Otherwise, no texture will be rendered to.
*/ */
@VisibleForTesting @VisibleForTesting
@CanIgnoreReturnValue @CanIgnoreReturnValue
......
...@@ -458,7 +458,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -458,7 +458,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
int outputTexId = int outputTexId =
GlUtil.createTexture( GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false); outputWidth,
outputHeight,
/* useHighPrecisionColorComponents= */ ColorInfo.isTransferHdr(outputColorInfo));
outputTexture = outputTexture =
glObjectsProvider.createBuffersForTexture(outputTexId, outputWidth, outputHeight); glObjectsProvider.createBuffersForTexture(outputTexId, outputWidth, outputHeight);
} }
......
...@@ -15,24 +15,38 @@ ...@@ -15,24 +15,38 @@
*/ */
package com.google.android.exoplayer2.transformer.mh; package com.google.android.exoplayer2.transformer.mh;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_FORMAT;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.recordTestSkipped;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.view.Surface; import android.view.Surface;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.effect.BitmapOverlay; import com.google.android.exoplayer2.effect.BitmapOverlay;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.OverlayEffect; import com.google.android.exoplayer2.effect.OverlayEffect;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner; import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.transformer.AndroidTestUtil;
import com.google.android.exoplayer2.transformer.EncoderUtil;
import com.google.android.exoplayer2.util.GlTextureInfo; import com.google.android.exoplayer2.util.GlTextureInfo;
import com.google.android.exoplayer2.util.GlUtil; import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
...@@ -54,19 +68,38 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -54,19 +68,38 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
private static final String BITMAP_OVERLAY_PNG_ASSET_PATH = private static final String BITMAP_OVERLAY_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png"; "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png";
private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png"; private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
/** Input video of which we only use the first frame. */
private static final String ORIGINAL_HLG10_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original_hlg10.png";
private static final String ORIGINAL_HDR10_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original_hdr10.png";
/** Input SDR video of which we only use the first frame. */
private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4"; private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
/** Input PQ video of which we only use the first frame. */
private static final String INPUT_PQ_MP4_ASSET_STRING = "media/mp4/hdr10-720p.mp4";
/** Input HLG video of which we only use the first frame. */
private static final String INPUT_HLG10_MP4_ASSET_STRING = "media/mp4/hlg-1080p.mp4";
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner; private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After @After
public void release() { public void release() {
checkNotNull(videoFrameProcessorTestRunner).release(); if (videoFrameProcessorTestRunner != null) {
videoFrameProcessorTestRunner.release();
}
} }
@Test @Test
public void noEffects_matchesGoldenFile() throws Exception { public void noEffects_matchesGoldenFile() throws Exception {
String testId = "noEffects_matchesGoldenFile"; String testId = "noEffects_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
...@@ -82,6 +115,13 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -82,6 +115,13 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
@Test @Test
public void bitmapOverlay_matchesGoldenFile() throws Exception { public void bitmapOverlay_matchesGoldenFile() throws Exception {
String testId = "bitmapOverlay_matchesGoldenFile"; String testId = "bitmapOverlay_matchesGoldenFile";
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
getApplicationContext(),
testId,
/* inputFormat= */ MP4_ASSET_FORMAT,
/* outputFormat= */ null)) {
return;
}
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH); Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap); BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
videoFrameProcessorTestRunner = videoFrameProcessorTestRunner =
...@@ -99,8 +139,79 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -99,8 +139,79 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE); .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
} }
// TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps, @Test
// using GlEffectWrapper to ensure usage of intermediate textures. public void noEffects_hlg10Input_matchesGoldenFile() throws Exception {
String testId = "noEffects_hlg10Input_matchesGoldenFile";
Context context = getApplicationContext();
Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
if (!deviceSupportsHdrEditing(format)) {
recordTestSkipped(context, testId, "No HLG editing support");
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
ColorInfo hlg10ColorInfo =
new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT2020)
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_HLG)
.build();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputColorInfo(hlg10ColorInfo)
.setOutputColorInfo(hlg10ColorInfo)
.setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING)
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_HLG10_PNG_ASSET_PATH);
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
expectedBitmap, actualBitmap);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
}
@Test
public void noEffects_hdr10Input_matchesGoldenFile() throws Exception {
String testId = "noEffects_hdr10Input_matchesGoldenFile";
Context context = getApplicationContext();
Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT;
if (!deviceSupportsHdrEditing(format)) {
      recordTestSkipped(context, testId, "No PQ (HDR10) editing support");
return;
}
if (AndroidTestUtil.skipAndLogIfFormatsUnsupported(
context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) {
return;
}
ColorInfo hdr10ColorInfo =
new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT2020)
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_ST2084)
.build();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setInputColorInfo(hdr10ColorInfo)
.setOutputColorInfo(hdr10ColorInfo)
.setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_HDR10_PNG_ASSET_PATH);
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16(
expectedBitmap, actualBitmap);
assertThat(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16);
}
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
...@@ -124,11 +235,13 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -124,11 +235,13 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
private static final class TextureBitmapReader private static final class TextureBitmapReader
implements VideoFrameProcessorTestRunner.BitmapReader { implements VideoFrameProcessorTestRunner.BitmapReader {
// TODO(b/239172735): This outputs an incorrect black output image on emulators. // TODO(b/239172735): This outputs an incorrect black output image on emulators.
private boolean useHighPrecisionColorComponents;
private @MonotonicNonNull Bitmap outputBitmap; private @MonotonicNonNull Bitmap outputBitmap;
@Override @Override
public Surface getSurface(int width, int height) { public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) {
this.useHighPrecisionColorComponents = useHighPrecisionColorComponents;
int texId; int texId;
try { try {
texId = GlUtil.createExternalTexture(); texId = GlUtil.createExternalTexture();
...@@ -149,8 +262,23 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { ...@@ -149,8 +262,23 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
GlUtil.focusFramebufferUsingCurrentContext( GlUtil.focusFramebufferUsingCurrentContext(
outputTexture.fboId, outputTexture.width, outputTexture.height); outputTexture.fboId, outputTexture.width, outputTexture.height);
outputBitmap = outputBitmap =
BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer( createBitmapFromCurrentGlFrameBuffer(
outputTexture.width, outputTexture.height); outputTexture.width, outputTexture.height, useHighPrecisionColorComponents);
} }
private static Bitmap createBitmapFromCurrentGlFrameBuffer(
int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException {
if (!useHighPrecisionColorComponents) {
return BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height);
}
checkState(Util.SDK_INT > 26, "useHighPrecisionColorComponents only supported on API 26+");
return BitmapPixelTestUtil.createFp16BitmapFromCurrentGlFramebuffer(width, height);
}
}
private static boolean deviceSupportsHdrEditing(Format format) {
return !EncoderUtil.getSupportedEncodersForHdrEditing(
checkNotNull(checkNotNull(format).sampleMimeType), format.colorInfo)
.isEmpty();
} }
} }
...@@ -29,6 +29,7 @@ import android.graphics.Matrix; ...@@ -29,6 +29,7 @@ import android.graphics.Matrix;
import android.graphics.PixelFormat; import android.graphics.PixelFormat;
import android.media.Image; import android.media.Image;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLES30;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi; import androidx.annotation.RequiresApi;
...@@ -49,8 +50,9 @@ public class BitmapPixelTestUtil { ...@@ -49,8 +50,9 @@ public class BitmapPixelTestUtil {
private static final String TAG = "BitmapPixelTestUtil"; private static final String TAG = "BitmapPixelTestUtil";
/** /**
* Maximum allowed average pixel difference between the expected and actual edited images in pixel * Maximum allowed average pixel difference between bitmaps generated using emulators.
* difference-based tests, between emulators. *
 * <p>This value is for 8-bit primaries in pixel difference-based tests.
* *
* <p>The value is chosen so that differences in decoder behavior across emulator versions don't * <p>The value is chosen so that differences in decoder behavior across emulator versions don't
* affect whether the test passes, but substantial distortions introduced by changes in tested * affect whether the test passes, but substantial distortions introduced by changes in tested
...@@ -63,8 +65,9 @@ public class BitmapPixelTestUtil { ...@@ -63,8 +65,9 @@ public class BitmapPixelTestUtil {
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 1.f; public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 1.f;
/** /**
* Maximum allowed average pixel difference between the expected and actual edited images in pixel * Maximum allowed average pixel difference between bitmaps generated using devices.
* difference-based tests, between devices, or devices and emulators. *
 * <p>This value is for 8-bit primaries in pixel difference-based tests.
* *
* <p>The value is chosen so that differences in decoder behavior across devices don't affect * <p>The value is chosen so that differences in decoder behavior across devices don't affect
* whether the test passes, but substantial distortions introduced by changes in tested components * whether the test passes, but substantial distortions introduced by changes in tested components
...@@ -80,6 +83,23 @@ public class BitmapPixelTestUtil { ...@@ -80,6 +83,23 @@ public class BitmapPixelTestUtil {
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE = 5.f; public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE = 5.f;
/** /**
* Maximum allowed average pixel difference between bitmaps with 16-bit primaries generated using
* devices.
*
* <p>The value is chosen so that differences in decoder behavior across devices in pixel
* difference-based tests don't affect whether the test passes, but substantial distortions
* introduced by changes in tested components will cause the test to fail.
*
* <p>When the difference is close to the threshold, manually inspect expected/actual bitmaps to
* confirm failure, as it's possible this is caused by a difference in the codec or graphics
* implementation as opposed to an issue in the tested component.
*
* <p>This value is larger than {@link #MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} to support the
* larger variance in decoder outputs between different physical devices and emulators.
*/
public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16 = .01f;
/**
* Reads a bitmap from the specified asset location. * Reads a bitmap from the specified asset location.
* *
* @param assetString Relative path to the asset within the assets directory. * @param assetString Relative path to the asset within the assets directory.
...@@ -134,10 +154,11 @@ public class BitmapPixelTestUtil { ...@@ -134,10 +154,11 @@ public class BitmapPixelTestUtil {
} }
/** /**
* Returns the average difference between the expected and actual bitmaps, calculated using the * Returns the average difference between the expected and actual bitmaps.
* maximum difference across all color channels for each pixel, then divided by the total number *
* of pixels in the image. The bitmap resolutions must match and they must use configuration * <p>Calculated using the maximum difference across all color channels for each pixel, then
* {@link Bitmap.Config#ARGB_8888}. * divided by the total number of pixels in the image. Bitmap resolutions must match and must use
* configuration {@link Bitmap.Config#ARGB_8888}.
* *
* <p>Tries to save a difference bitmap between expected and actual bitmaps. * <p>Tries to save a difference bitmap between expected and actual bitmaps.
* *
...@@ -155,11 +176,9 @@ public class BitmapPixelTestUtil { ...@@ -155,11 +176,9 @@ public class BitmapPixelTestUtil {
Bitmap actual, Bitmap actual,
@Nullable String testId, @Nullable String testId,
@Nullable String differencesBitmapPath) { @Nullable String differencesBitmapPath) {
assertBitmapsMatch(expected, actual);
int width = actual.getWidth(); int width = actual.getWidth();
int height = actual.getHeight(); int height = actual.getHeight();
assertThat(width).isEqualTo(expected.getWidth());
assertThat(height).isEqualTo(expected.getHeight());
assertThat(actual.getConfig()).isEqualTo(Bitmap.Config.ARGB_8888);
long sumMaximumAbsoluteDifferences = 0; long sumMaximumAbsoluteDifferences = 0;
// Debug-only image diff without alpha. To use, set a breakpoint right before the method return // Debug-only image diff without alpha. To use, set a breakpoint right before the method return
// to view the difference between the expected and actual bitmaps. A passing test should show // to view the difference between the expected and actual bitmaps. A passing test should show
...@@ -193,6 +212,53 @@ public class BitmapPixelTestUtil { ...@@ -193,6 +212,53 @@ public class BitmapPixelTestUtil {
} }
/** /**
* Returns the average difference between the expected and actual bitmaps.
*
* <p>Calculated using the maximum difference across all color channels for each pixel, then
* divided by the total number of pixels in the image. Bitmap resolutions must match and must use
* configuration {@link Bitmap.Config#RGBA_F16}.
*
* @param expected The expected {@link Bitmap}.
* @param actual The actual {@link Bitmap} produced by the test.
* @return The average of the maximum absolute pixel-wise differences between the expected and
* actual bitmaps.
*/
@RequiresApi(29) // Bitmap#getColor()
public static float getBitmapAveragePixelAbsoluteDifferenceFp16(Bitmap expected, Bitmap actual) {
assertBitmapsMatch(expected, actual);
int width = actual.getWidth();
int height = actual.getHeight();
float sumMaximumAbsoluteDifferences = 0;
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
Color actualColor = actual.getColor(x, y);
Color expectedColor = expected.getColor(x, y);
float alphaDifference = abs(actualColor.alpha() - expectedColor.alpha());
float redDifference = abs(actualColor.red() - expectedColor.red());
float blueDifference = abs(actualColor.blue() - expectedColor.blue());
float greenDifference = abs(actualColor.green() - expectedColor.green());
float maximumAbsoluteDifference = 0;
maximumAbsoluteDifference = max(maximumAbsoluteDifference, alphaDifference);
maximumAbsoluteDifference = max(maximumAbsoluteDifference, redDifference);
maximumAbsoluteDifference = max(maximumAbsoluteDifference, blueDifference);
maximumAbsoluteDifference = max(maximumAbsoluteDifference, greenDifference);
sumMaximumAbsoluteDifferences += maximumAbsoluteDifference;
}
}
return sumMaximumAbsoluteDifferences / (width * height);
}
private static void assertBitmapsMatch(Bitmap expected, Bitmap actual) {
assertThat(actual.getWidth()).isEqualTo(expected.getWidth());
assertThat(actual.getHeight()).isEqualTo(expected.getHeight());
assertThat(actual.getConfig()).isEqualTo(expected.getConfig());
}
/**
* Returns the average difference between the expected and actual bitmaps, calculated using the * Returns the average difference between the expected and actual bitmaps, calculated using the
* maximum difference across all color channels for each pixel, then divided by the total number * maximum difference across all color channels for each pixel, then divided by the total number
* of pixels in the image, without saving the difference bitmap. See {@link * of pixels in the image, without saving the difference bitmap. See {@link
...@@ -244,7 +310,8 @@ public class BitmapPixelTestUtil { ...@@ -244,7 +310,8 @@ public class BitmapPixelTestUtil {
} }
/** /**
* Creates a bitmap with the values of the current OpenGL framebuffer. * Creates a {@link Bitmap.Config#ARGB_8888} bitmap with the values of the current OpenGL
* framebuffer.
* *
* <p>This method may block until any previously called OpenGL commands are complete. * <p>This method may block until any previously called OpenGL commands are complete.
* *
...@@ -254,16 +321,39 @@ public class BitmapPixelTestUtil { ...@@ -254,16 +321,39 @@ public class BitmapPixelTestUtil {
*/ */
public static Bitmap createArgb8888BitmapFromCurrentGlFramebuffer(int width, int height) public static Bitmap createArgb8888BitmapFromCurrentGlFramebuffer(int width, int height)
throws GlUtil.GlException { throws GlUtil.GlException {
ByteBuffer rgba8888Buffer = ByteBuffer.allocateDirect(width * height * 4); return createBitmapFromCurrentGlFrameBuffer(
// TODO(b/227624622): Add support for reading HDR bitmaps. width, height, /* pixelSize= */ 4, GLES20.GL_UNSIGNED_BYTE, Bitmap.Config.ARGB_8888);
}
/**
* Creates a {@link Bitmap.Config#RGBA_F16} bitmap with the values of the current OpenGL
* framebuffer.
*
* <p>This method may block until any previously called OpenGL commands are complete.
*
* @param width The width of the pixel rectangle to read.
* @param height The height of the pixel rectangle to read.
* @return A {@link Bitmap} with the framebuffer's values.
*/
@RequiresApi(26) // Bitmap.Config.RGBA_F16
public static Bitmap createFp16BitmapFromCurrentGlFramebuffer(int width, int height)
throws GlUtil.GlException {
return createBitmapFromCurrentGlFrameBuffer(
width, height, /* pixelSize= */ 8, GLES30.GL_HALF_FLOAT, Bitmap.Config.RGBA_F16);
}
private static Bitmap createBitmapFromCurrentGlFrameBuffer(
int width, int height, int pixelSize, int glReadPixelsFormat, Bitmap.Config bitmapConfig)
throws GlUtil.GlException {
ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(width * height * pixelSize);
GLES20.glReadPixels( GLES20.glReadPixels(
0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgba8888Buffer); /* x= */ 0, /* y= */ 0, width, height, GLES20.GL_RGBA, glReadPixelsFormat, pixelBuffer);
GlUtil.checkGlError(); GlUtil.checkGlError();
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); Bitmap bitmap = Bitmap.createBitmap(width, height, bitmapConfig);
// According to https://www.khronos.org/opengl/wiki/Pixel_Transfer#Endian_issues, // According to https://www.khronos.org/opengl/wiki/Pixel_Transfer#Endian_issues,
// the colors will have the order RGBA in client memory. This is what the bitmap expects: // the colors will have the order RGBA in client memory. This is what the bitmap expects:
// https://developer.android.com/reference/android/graphics/Bitmap.Config#ARGB_8888. // https://developer.android.com/reference/android/graphics/Bitmap.Config.
bitmap.copyPixelsFromBuffer(rgba8888Buffer); bitmap.copyPixelsFromBuffer(pixelBuffer);
// Flip the bitmap as its positive y-axis points down while OpenGL's positive y-axis points up. // Flip the bitmap as its positive y-axis points down while OpenGL's positive y-axis points up.
return flipBitmapVertically(bitmap); return flipBitmapVertically(bitmap);
} }
...@@ -275,7 +365,6 @@ public class BitmapPixelTestUtil { ...@@ -275,7 +365,6 @@ public class BitmapPixelTestUtil {
* @return The identifier of the newly created texture. * @return The identifier of the newly created texture.
*/ */
public static int createGlTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException { public static int createGlTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException {
// TODO(b/227624622): Add support for reading HDR bitmaps.
int texId = int texId =
GlUtil.createTexture( GlUtil.createTexture(
bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false); bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false);
......
...@@ -278,7 +278,12 @@ public final class VideoFrameProcessorTestRunner { ...@@ -278,7 +278,12 @@ public final class VideoFrameProcessorTestRunner {
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
Surface outputSurface = bitmapReader.getSurface(width, height); Surface outputSurface =
bitmapReader.getSurface(
width,
height,
/* useHighPrecisionColorComponents= */ ColorInfo.isTransferHdr(
outputColorInfo));
checkNotNull(videoFrameProcessor) checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(new SurfaceInfo(outputSurface, width, height)); .setOutputSurfaceInfo(new SurfaceInfo(outputSurface, width, height));
} }
...@@ -359,7 +364,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -359,7 +364,7 @@ public final class VideoFrameProcessorTestRunner {
public interface BitmapReader { public interface BitmapReader {
/** Returns the {@link VideoFrameProcessor} output {@link Surface}. */ /** Returns the {@link VideoFrameProcessor} output {@link Surface}. */
Surface getSurface(int width, int height); Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents);
/** Returns the output {@link Bitmap}. */ /** Returns the output {@link Bitmap}. */
Bitmap getBitmap(); Bitmap getBitmap();
...@@ -378,7 +383,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -378,7 +383,7 @@ public final class VideoFrameProcessorTestRunner {
@Override @Override
@SuppressLint("WrongConstant") @SuppressLint("WrongConstant")
public Surface getSurface(int width, int height) { public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) {
imageReader = imageReader =
ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1); ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
return imageReader.getSurface(); return imageReader.getSurface();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment