Commit 2b79dee7 by tofunmi Committed by Tofunmi Adigun-Hameed

Transformer: Support Texture asset loading

PiperOrigin-RevId: 530888319
parent f20ed116
...@@ -22,14 +22,22 @@ import static com.google.android.exoplayer2.util.MimeTypes.VIDEO_H264; ...@@ -22,14 +22,22 @@ import static com.google.android.exoplayer2.util.MimeTypes.VIDEO_H264;
import static com.google.android.exoplayer2.util.MimeTypes.VIDEO_H265; import static com.google.android.exoplayer2.util.MimeTypes.VIDEO_H265;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Build; import android.os.Build;
import android.util.Pair; import android.util.Pair;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem; import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.effect.DefaultGlObjectsProvider;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaFormatUtil; import com.google.android.exoplayer2.util.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
...@@ -508,6 +516,37 @@ public final class AndroidTestUtil { ...@@ -508,6 +516,37 @@ public final class AndroidTestUtil {
public static final String MP3_ASSET_URI_STRING = "asset:///media/mp3/test.mp3"; public static final String MP3_ASSET_URI_STRING = "asset:///media/mp3/test.mp3";
/** /**
* Creates the GL objects needed to set up a GL environment including an {@link EGLDisplay} and an
* {@link EGLContext}.
*/
public static EGLContext createOpenGlObjects() throws GlUtil.GlException {
EGLDisplay eglDisplay = GlUtil.createEglDisplay();
int[] configAttributes = GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
GlObjectsProvider glObjectsProvider =
new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
EGLContext eglContext =
glObjectsProvider.createEglContext(eglDisplay, /* openGlVersion= */ 2, configAttributes);
glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes);
return eglContext;
}
/**
* Generates a {@linkplain android.opengl.GLES10#GL_TEXTURE_2D traditional GLES texture} from the
* given bitmap.
*
* <p>Must have a GL context set up.
*/
public static int generateTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException {
int texId =
GlUtil.createTexture(
bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
return texId;
}
/**
* Log in logcat and in an analysis file that this test was skipped. * Log in logcat and in an analysis file that this test was skipped.
* *
* <p>Analysis file is a JSON summarising the test, saved to the application cache. * <p>Analysis file is a JSON summarising the test, saved to the application cache.
......
...@@ -20,11 +20,20 @@ import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSE ...@@ -20,11 +20,20 @@ import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSE
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING; import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.createOpenGlObjects;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.generateTextureFromBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertThrows;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri; import android.net.Uri;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.test.core.app.ApplicationProvider; import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
...@@ -33,12 +42,19 @@ import com.google.android.exoplayer2.MediaItem; ...@@ -33,12 +42,19 @@ import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor; import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.SonicAudioProcessor; import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.effect.Contrast; import com.google.android.exoplayer2.effect.Contrast;
import com.google.android.exoplayer2.effect.DefaultGlObjectsProvider;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.FrameCache; import com.google.android.exoplayer2.effect.FrameCache;
import com.google.android.exoplayer2.effect.Presentation; import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.effect.RgbFilter; import com.google.android.exoplayer2.effect.RgbFilter;
import com.google.android.exoplayer2.effect.TimestampWrapper; import com.google.android.exoplayer2.effect.TimestampWrapper;
import com.google.android.exoplayer2.upstream.DataSourceBitmapLoader;
import com.google.android.exoplayer2.util.Effect; import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
...@@ -50,6 +66,7 @@ import org.junit.runner.RunWith; ...@@ -50,6 +66,7 @@ import org.junit.runner.RunWith;
public class TransformerEndToEndTest { public class TransformerEndToEndTest {
private final Context context = ApplicationProvider.getApplicationContext(); private final Context context = ApplicationProvider.getApplicationContext();
private volatile @MonotonicNonNull TextureAssetLoader textureAssetLoader;
@Test @Test
public void videoEditing_withImageInput_completesWithCorrectFrameCountAndDuration() public void videoEditing_withImageInput_completesWithCorrectFrameCountAndDuration()
...@@ -99,6 +116,118 @@ public class TransformerEndToEndTest { ...@@ -99,6 +116,118 @@ public class TransformerEndToEndTest {
} }
@Test @Test
  public void videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration()
      throws Exception {
    String testId = "videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration";
    // Decode the PNG asset; its pixels are uploaded to a GL texture further below.
    Bitmap bitmap =
        new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
    // Use a custom AssetLoader.Factory so input comes from queued textures, not from a media URI.
    Transformer transformer =
        new Transformer.Builder(context)
            .setAssetLoaderFactory(
                new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
            .build();
    int expectedFrameCount = 2;
    // The frame processor must share a GL context with the thread that created the texture.
    EGLContext currentContext = createOpenGlObjects();
    DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
            .build();
    ImmutableList<Effect> videoEffects = ImmutableList.of(Presentation.createForHeight(480));
    // The MediaItem URI is unused by the texture asset loader, but a duration must be declared.
    EditedMediaItem editedMediaItem =
        new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
            .setDurationUs(C.MICROS_PER_SECOND)
            .setEffects(
                new Effects(
                    /* audioProcessors= */ ImmutableList.of(),
                    videoEffects,
                    videoFrameProcessorFactory))
            .build();
    int texId = generateTextureFromBitmap(bitmap);
    // Queue textures from a dedicated thread: the TextureAssetLoader instance only exists once
    // export has started, so poll via self-posted messages until the first queue call succeeds,
    // then queue the second frame and signal end of input.
    HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
    textureQueuingThread.start();
    Looper looper = checkNotNull(textureQueuingThread.getLooper());
    Handler textureHandler =
        new Handler(looper) {
          @Override
          public void handleMessage(Message msg) {
            if (textureAssetLoader != null
                && textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
              textureAssetLoader.queueInputTexture(
                  texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
              textureAssetLoader.signalEndOfVideoInput();
              return;
            }
            // Loader not ready or consumer busy; retry on the next message.
            sendEmptyMessage(0);
          }
        };
    textureHandler.sendEmptyMessage(0);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(testId, editedMediaItem);
    assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
    // Expected timestamp of the last frame.
    assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
  }
@Test
  public void videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration()
      throws Exception {
    String testId = "videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration";
    // Decode the PNG asset; its pixels are uploaded to a GL texture further below.
    Bitmap bitmap =
        new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
    // Use a custom AssetLoader.Factory so input comes from queued textures, not from a media URI.
    Transformer transformer =
        new Transformer.Builder(context)
            .setAssetLoaderFactory(
                new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
            .build();
    int expectedFrameCount = 2;
    // The frame processor must share a GL context with the thread that created the texture.
    EGLContext currentContext = createOpenGlObjects();
    DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
            .build();
    // No effects here: this exercises plain transcoding of texture input, unlike the editing test.
    EditedMediaItem editedMediaItem =
        new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
            .setDurationUs(C.MICROS_PER_SECOND)
            .setEffects(
                new Effects(
                    /* audioProcessors= */ ImmutableList.of(),
                    /* videoEffects= */ ImmutableList.of(),
                    videoFrameProcessorFactory))
            .build();
    int texId = generateTextureFromBitmap(bitmap);
    // Queue textures from a dedicated thread: the TextureAssetLoader instance only exists once
    // export has started, so poll via self-posted messages until the first queue call succeeds,
    // then queue the second frame and signal end of input.
    HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
    textureQueuingThread.start();
    Looper looper = checkNotNull(textureQueuingThread.getLooper());
    Handler textureHandler =
        new Handler(looper) {
          @Override
          public void handleMessage(Message msg) {
            if (textureAssetLoader != null
                && textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
              textureAssetLoader.queueInputTexture(
                  texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
              textureAssetLoader.signalEndOfVideoInput();
              return;
            }
            // Loader not ready or consumer busy; retry on the next message.
            sendEmptyMessage(0);
          }
        };
    textureHandler.sendEmptyMessage(0);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(testId, editedMediaItem);
    assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
    // Expected timestamp of the last frame.
    assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
  }
@Test
public void videoEditing_completesWithConsistentFrameCount() throws Exception { public void videoEditing_completesWithConsistentFrameCount() throws Exception {
Transformer transformer = Transformer transformer =
new Transformer.Builder(context) new Transformer.Builder(context)
...@@ -366,6 +495,34 @@ public class TransformerEndToEndTest { ...@@ -366,6 +495,34 @@ public class TransformerEndToEndTest {
assertThat(result.exportResult.durationMs).isEqualTo(3100); assertThat(result.exportResult.durationMs).isEqualTo(3100);
} }
private final class TestTextureAssetLoaderFactory implements AssetLoader.Factory {
private final int width;
private final int height;
TestTextureAssetLoaderFactory(int width, int height) {
this.width = width;
this.height = height;
}
@Override
public TextureAssetLoader createAssetLoader(
EditedMediaItem editedMediaItem, Looper looper, AssetLoader.Listener listener) {
Format format = new Format.Builder().setWidth(width).setHeight(height).build();
OnInputFrameProcessedListener frameProcessedListener =
texId -> {
try {
GlUtil.deleteTexture(texId);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
};
textureAssetLoader =
new TextureAssetLoader(editedMediaItem, listener, format, frameProcessedListener);
return textureAssetLoader;
}
}
private static final class VideoUnsupportedEncoderFactory implements Codec.EncoderFactory { private static final class VideoUnsupportedEncoderFactory implements Codec.EncoderFactory {
private final Codec.EncoderFactory encoderFactory; private final Codec.EncoderFactory encoderFactory;
......
...@@ -19,6 +19,7 @@ import android.graphics.Bitmap; ...@@ -19,6 +19,7 @@ import android.graphics.Bitmap;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
/** Consumer of encoded media samples, raw audio or raw video frames. */ /** Consumer of encoded media samples, raw audio or raw video frames. */
...@@ -81,6 +82,31 @@ public interface SampleConsumer { ...@@ -81,6 +82,31 @@ public interface SampleConsumer {
// Methods to pass raw video input. // Methods to pass raw video input.
/** /**
* Provides a {@link OnInputFrameProcessedListener} to the consumer.
*
* <p>Should only be used for raw video data when input is provided by texture ID.
*
* @param listener The {@link OnInputFrameProcessedListener}.
*/
default void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
throw new UnsupportedOperationException();
}
/**
* Attempts to provide an input texture to the consumer.
*
* <p>Should only be used for raw video data.
*
* @param texId The ID of the texture to queue to the consumer.
* @param presentationTimeUs The presentation time for the texture, in microseconds.
* @return Whether the texture was successfully queued. If {@code false}, the caller should try
* again later.
*/
default boolean queueInputTexture(int texId, long presentationTimeUs) {
throw new UnsupportedOperationException();
}
/**
* Returns the input {@link Surface}, where the consumer reads input frames from. * Returns the input {@link Surface}, where the consumer reads input frames from.
* *
* <p>Should only be used for raw video data. * <p>Should only be used for raw video data.
......
...@@ -34,6 +34,7 @@ import com.google.android.exoplayer2.decoder.DecoderInputBuffer; ...@@ -34,6 +34,7 @@ import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.HandlerWrapper; import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
...@@ -423,6 +424,24 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -423,6 +424,24 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
@Override @Override
  public boolean queueInputTexture(int texId, long presentationTimeUs) {
    // Offset the texture's timestamp by the accumulated duration of earlier items to place it on
    // the overall sequence timeline.
    long globalTimestampUs = totalDurationUs + presentationTimeUs;
    if (isLooping && globalTimestampUs >= maxSequenceDurationUs) {
      // The looping sequence has caught up with the longest sequence: reject further input, and
      // signal end of stream exactly once (guarded by videoLoopingEnded) when the maximum
      // duration is known to be final.
      if (isMaxSequenceDurationUsFinal && !videoLoopingEnded) {
        videoLoopingEnded = true;
        signalEndOfVideoInput();
      }
      return false;
    }
    return sampleConsumer.queueInputTexture(texId, presentationTimeUs);
  }
  @Override
  public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
    // Plain delegation: no sequence-level bookkeeping is needed for this callback.
    sampleConsumer.setOnInputFrameProcessedListener(listener);
  }
@Override
public Surface getInputSurface() { public Surface getInputSurface() {
return sampleConsumer.getInputSurface(); return sampleConsumer.getInputSurface();
} }
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.transformer.ExportException.ERROR_CODE_UNSPECIFIED;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.Math.round;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.common.collect.ImmutableMap;
/**
* An {@link AssetLoader} implementation that loads videos from {@linkplain
* android.opengl.GLES10#GL_TEXTURE_2D traditional GLES texture} instances.
*
* <p>Typically instantiated in a custom {@link AssetLoader.Factory} saving a reference to the
 * created {@link TextureAssetLoader}. Input is provided by calling {@link #queueInputTexture} to
* provide all the video frames, then {@link #signalEndOfVideoInput() signalling the end of input}
* when finished.
*/
public final class TextureAssetLoader implements AssetLoader {
  private final EditedMediaItem editedMediaItem;
  private final Listener assetLoaderListener;
  // Video track format reported to the listener; its sample MIME type is forced to VIDEO_RAW.
  private final Format format;
  // Forwarded to the SampleConsumer so the caller is notified when a queued texture has been
  // processed and can be released.
  private final OnInputFrameProcessedListener frameProcessedListener;
  // Set lazily by queueInputTexture once the listener provides a consumer; reset by release().
  @Nullable private SampleConsumer sampleConsumer;
  private @Transformer.ProgressState int progressState;
  // Timestamp of the most recently queued texture, used to estimate progress.
  private long lastQueuedPresentationTimeUs;
  // Whether onTrackAdded has been reported, so it is only called once.
  private boolean isTrackAdded;
  /**
   * Creates an instance.
   *
   * <p>The {@link EditedMediaItem#durationUs}, {@link Format#width} and {@link Format#height} must
   * be set.
   */
  public TextureAssetLoader(
      EditedMediaItem editedMediaItem,
      Listener assetLoaderListener,
      Format format,
      OnInputFrameProcessedListener frameProcessedListener) {
    checkArgument(editedMediaItem.durationUs != C.TIME_UNSET);
    checkArgument(format.height != Format.NO_VALUE && format.width != Format.NO_VALUE);
    this.editedMediaItem = editedMediaItem;
    this.assetLoaderListener = assetLoaderListener;
    // Mark the track as raw video so downstream components treat frames as already decoded.
    this.format = format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_RAW).build();
    this.frameProcessedListener = frameProcessedListener;
    progressState = PROGRESS_STATE_NOT_STARTED;
  }
  @Override
  public void start() {
    progressState = PROGRESS_STATE_AVAILABLE;
    // Duration is reported before the track count; the unit test for this class asserts that
    // onTrackAdded only happens after both of these callbacks.
    assetLoaderListener.onDurationUs(editedMediaItem.durationUs);
    assetLoaderListener.onTrackCount(1);
  }
  @Override
  public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
    if (progressState == PROGRESS_STATE_AVAILABLE) {
      // Progress is the percentage of the declared duration that has been queued so far.
      // NOTE(review): this can exceed 100 if a timestamp beyond durationUs is queued — confirm
      // whether clamping is needed.
      progressHolder.progress =
          round((lastQueuedPresentationTimeUs / (float) editedMediaItem.durationUs) * 100);
    }
    return progressState;
  }
  @Override
  public ImmutableMap<Integer, String> getDecoderNames() {
    // No decoders are used: the input textures are already decoded frames.
    return ImmutableMap.of();
  }
  @Override
  public void release() {
    // Reset all mutable state so the instance reports no progress after release.
    isTrackAdded = false;
    progressState = PROGRESS_STATE_NOT_STARTED;
    sampleConsumer = null;
  }
  /**
   * Attempts to provide an input texture.
   *
   * <p>Must be called on the same thread as {@link #signalEndOfVideoInput}.
   *
   * @param texId The ID of the texture to queue.
   * @param presentationTimeUs The presentation time for the texture, in microseconds.
   * @return Whether the texture was successfully queued. If {@code false}, the caller should try
   *     again later.
   */
  public boolean queueInputTexture(int texId, long presentationTimeUs) {
    try {
      // Report the track lazily, the first time a texture is queued.
      if (!isTrackAdded) {
        assetLoaderListener.onTrackAdded(format, SUPPORTED_OUTPUT_TYPE_DECODED);
        isTrackAdded = true;
      }
      if (sampleConsumer == null) {
        // The downstream consumer may not be ready yet; if so, ask the caller to retry later.
        sampleConsumer = assetLoaderListener.onOutputFormat(format);
        if (sampleConsumer == null) {
          return false;
        } else {
          sampleConsumer.setOnInputFrameProcessedListener(frameProcessedListener);
        }
      }
      // The consumer itself may also apply backpressure.
      if (!sampleConsumer.queueInputTexture(texId, presentationTimeUs)) {
        return false;
      }
      lastQueuedPresentationTimeUs = presentationTimeUs;
      return true;
    } catch (ExportException e) {
      assetLoaderListener.onError(e);
    } catch (RuntimeException e) {
      // Wrap unexpected failures so they are reported through the AssetLoader error channel.
      assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
    }
    // An error occurred and has been forwarded to the listener.
    return false;
  }
  /**
   * Signals that no further input frames will be rendered.
   *
   * <p>Must be called on the same thread as {@link #queueInputTexture}.
   */
  public void signalEndOfVideoInput() {
    try {
      // checkNotNull throws if called before any texture was successfully queued; that failure is
      // routed to the listener below rather than propagated.
      checkNotNull(sampleConsumer).signalEndOfVideoInput();
    } catch (RuntimeException e) {
      assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
    }
  }
}
...@@ -24,6 +24,7 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument; ...@@ -24,6 +24,7 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP; import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE; import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static com.google.android.exoplayer2.video.ColorInfo.SDR_BT709_LIMITED; import static com.google.android.exoplayer2.video.ColorInfo.SDR_BT709_LIMITED;
import static com.google.android.exoplayer2.video.ColorInfo.SRGB_BT709_FULL; import static com.google.android.exoplayer2.video.ColorInfo.SRGB_BT709_FULL;
import static com.google.android.exoplayer2.video.ColorInfo.isTransferHdr; import static com.google.android.exoplayer2.video.ColorInfo.isTransferHdr;
...@@ -53,6 +54,7 @@ import com.google.android.exoplayer2.util.SurfaceInfo; ...@@ -53,6 +54,7 @@ import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException; import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor; import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo; import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
...@@ -213,14 +215,8 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -213,14 +215,8 @@ import org.checkerframework.dataflow.qual.Pure;
boolean isLast) { boolean isLast) {
if (trackFormat != null) { if (trackFormat != null) {
Size decodedSize = getDecodedSize(trackFormat); Size decodedSize = getDecodedSize(trackFormat);
String mimeType = checkNotNull(trackFormat.sampleMimeType); videoFrameProcessor.registerInputStream(
if (MimeTypes.isVideo(mimeType)) { getInputType(checkNotNull(trackFormat.sampleMimeType)));
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
} else if (MimeTypes.isImage(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
} else {
throw new IllegalArgumentException("MIME type not supported " + mimeType);
}
videoFrameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight()) new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
...@@ -237,6 +233,17 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -237,6 +233,17 @@ import org.checkerframework.dataflow.qual.Pure;
} }
@Override @Override
  public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
    // Delegate to the frame processor, which invokes the listener once each queued input texture
    // has been processed and can be released by the producer.
    videoFrameProcessor.setOnInputFrameProcessedListener(listener);
  }
  @Override
  public boolean queueInputTexture(int texId, long presentationTimeUs) {
    // Always reports success. NOTE(review): this assumes the frame processor's queueInputTexture
    // never applies backpressure — confirm against the VideoFrameProcessor implementation in use.
    videoFrameProcessor.queueInputTexture(texId, presentationTimeUs);
    return true;
  }
@Override
public Surface getInputSurface() { public Surface getInputSurface() {
return videoFrameProcessor.getInputSurface(); return videoFrameProcessor.getInputSurface();
} }
...@@ -308,6 +315,19 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -308,6 +315,19 @@ import org.checkerframework.dataflow.qual.Pure;
return encoderWrapper.isEnded(); return encoderWrapper.isEnded();
} }
  // Maps a sample MIME type to the VideoFrameProcessor input type to register.
  // The check order matters: VIDEO_RAW is presumably also a video/* MIME type, so the texture-ID
  // check must run before the generic video check — TODO confirm against MimeTypes.isVideo.
  private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
    if (MimeTypes.isImage(sampleMimeType)) {
      return INPUT_TYPE_BITMAP;
    }
    if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
      return INPUT_TYPE_TEXTURE_ID;
    }
    if (MimeTypes.isVideo(sampleMimeType)) {
      return INPUT_TYPE_SURFACE;
    }
    throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
  }
private static Size getDecodedSize(Format format) { private static Size getDecodedSize(Format format) {
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees. // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height; int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.robolectric.RobolectricUtil.runLooperUntil;
import static com.google.common.truth.Truth.assertThat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowSystemClock;
/** Unit tests for {@link TextureAssetLoader}. */
@RunWith(AndroidJUnit4.class)
public class TextureAssetLoaderTest {
  @Test
  public void textureAssetLoader_callsListenerCallbacksInRightOrder() throws Exception {
    // Run the asset loader on a separate thread so callback ordering races can surface.
    HandlerThread assetLoaderThread = new HandlerThread("AssetLoaderThread");
    assetLoaderThread.start();
    Looper assetLoaderLooper = assetLoaderThread.getLooper();
    AtomicReference<Exception> exceptionRef = new AtomicReference<>();
    AtomicBoolean isOutputFormatSet = new AtomicBoolean();
    // Listener that records an exception if callbacks arrive out of the documented order.
    AssetLoader.Listener listener =
        new AssetLoader.Listener() {
          private volatile boolean isDurationSet;
          private volatile boolean isTrackCountSet;
          private volatile boolean isTrackAdded;
          @Override
          public void onDurationUs(long durationUs) {
            // Sleep to increase the chances of the test failing.
            sleep();
            isDurationSet = true;
          }
          @Override
          public void onTrackCount(int trackCount) {
            // Sleep to increase the chances of the test failing.
            sleep();
            isTrackCountSet = true;
          }
          @Override
          public boolean onTrackAdded(
              Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
            if (!isDurationSet) {
              exceptionRef.set(
                  new IllegalStateException("onTrackAdded() called before onDurationUs()"));
            } else if (!isTrackCountSet) {
              exceptionRef.set(
                  new IllegalStateException("onTrackAdded() called before onTrackCount()"));
            }
            sleep();
            isTrackAdded = true;
            return false;
          }
          @Override
          public SampleConsumer onOutputFormat(Format format) {
            if (!isTrackAdded) {
              exceptionRef.set(
                  new IllegalStateException("onOutputFormat() called before onTrackAdded()"));
            }
            isOutputFormatSet.set(true);
            return new FakeSampleConsumer();
          }
          @Override
          public void onError(ExportException e) {
            exceptionRef.set(e);
          }
          private void sleep() {
            try {
              Thread.sleep(10);
            } catch (InterruptedException e) {
              // Restore the interrupt status so the interruption is not silently swallowed.
              Thread.currentThread().interrupt();
              exceptionRef.set(e);
            }
          }
        };
    TextureAssetLoader assetLoader = getAssetLoader(listener);
    new Handler(assetLoaderLooper).post(() -> runTextureAssetLoader(assetLoader));
    runLooperUntil(
        Looper.myLooper(),
        () -> {
          // Advance the fake clock so the loop makes progress under Robolectric.
          ShadowSystemClock.advanceBy(Duration.ofMillis(10));
          return isOutputFormatSet.get() || exceptionRef.get() != null;
        });
    assertThat(exceptionRef.get()).isNull();
  }
  // Drives the loader through its full lifecycle: start, one queued texture, then end of input.
  private static void runTextureAssetLoader(TextureAssetLoader assetLoader) {
    assetLoader.start();
    assetLoader.queueInputTexture(/* texId= */ 0, /* presentationTimeUs= */ 0);
    assetLoader.signalEndOfVideoInput();
  }
  // Builds a loader with a minimal valid configuration (duration and dimensions are required).
  private static TextureAssetLoader getAssetLoader(AssetLoader.Listener listener) {
    EditedMediaItem editedMediaItem =
        new EditedMediaItem.Builder(new MediaItem.Builder().build())
            .setDurationUs(C.MICROS_PER_SECOND)
            .build();
    Format format = new Format.Builder().setWidth(10).setHeight(10).build();
    OnInputFrameProcessedListener frameProcessedListener = unused -> {};
    return new TextureAssetLoader(editedMediaItem, listener, format, frameProcessedListener);
  }
  /** No-op consumer that accepts every texture so the loader can run to completion. */
  private static final class FakeSampleConsumer implements SampleConsumer {
    @Override
    public boolean queueInputTexture(int texId, long presentationTimeUs) {
      return true;
    }
    @Override
    public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {}
    @Override
    public void signalEndOfVideoInput() {}
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment