Commit a43ef109 by claincly Committed by Tofunmi Adigun-Hameed

Enable switching input types via InputMultiplexer.

PiperOrigin-RevId: 529624205
parent 5dbbff49
......@@ -77,13 +77,12 @@ public interface VideoFrameProcessor {
* @param debugViewProvider A {@link DebugViewProvider}.
* @param inputColorInfo The {@link ColorInfo} for input frames.
* @param outputColorInfo The {@link ColorInfo} for output frames.
* @param inputType The {@link InputType}.
* @param renderFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to
* render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}.
* @return A new instance.
* @throws VideoFrameProcessingException If a problem occurs while creating the {@link
......@@ -95,9 +94,8 @@ public interface VideoFrameProcessor {
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@InputType int inputType,
boolean renderFramesAutomatically,
Executor executor,
Executor listenerExecutor,
Listener listener)
throws VideoFrameProcessingException;
}
......@@ -153,15 +151,14 @@ public interface VideoFrameProcessor {
/**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_BITMAP}.
*
* <p>Can be called on any thread.
*
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_BITMAP bitmap input}.
*/
// TODO(b/262693274): Remove duration and frameRate parameters when EditedMediaItem can be
// signalled down to the processors.
......@@ -171,10 +168,10 @@ public interface VideoFrameProcessor {
* Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames
* from.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/
Surface getInputSurface();
......@@ -206,11 +203,10 @@ public interface VideoFrameProcessor {
*
* <p>Must be called before rendering a frame to the input surface.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
* @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
* #setInputFrameInfo(FrameInfo)}.
*/
......@@ -278,10 +274,10 @@ public interface VideoFrameProcessor {
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>{@link Listener} methods invoked prior to calling this method should be ignored.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/
void flush();
......
......@@ -22,7 +22,6 @@ import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static java.lang.Math.max;
import static java.lang.Math.min;
......@@ -2022,9 +2021,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
DebugViewProvider.NONE,
inputAndOutputColorInfos.first,
inputAndOutputColorInfos.second,
INPUT_TYPE_SURFACE,
/* renderFramesAutomatically= */ false,
/* executor= */ handler::post,
/* listenerExecutor= */ handler::post,
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
......@@ -2075,6 +2073,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException();
}
});
videoFrameProcessor.registerInputStream(VideoFrameProcessor.INPUT_TYPE_SURFACE);
this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) {
throw renderer.createRendererException(
......
......@@ -301,7 +301,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
DebugViewProvider.NONE,
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
INPUT_TYPE_SURFACE,
renderFramesAutomatically,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -341,7 +340,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
videoFrameProcessingEndedCountDownLatch.countDown();
}
}));
defaultVideoFrameProcessor
.getTaskExecutor()
.submit(
......
......@@ -41,14 +41,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs;
private boolean useHdr;
private boolean inputEnded;
private boolean currentInputStreamEnded;
/**
* Creates a new instance.
*
......@@ -78,7 +76,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) {
videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr));
() -> {
setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr);
currentInputStreamEnded = false;
});
}
@Override
......@@ -89,7 +90,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void signalEndOfCurrentInputStream() {
// Do nothing here. End of current input signaling is handled in maybeQueueToShaderProgram().
signalEndOfInput();
}
@Override
......@@ -99,7 +100,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
shaderProgram.signalEndOfCurrentInputStream();
} else {
inputEnded = true;
currentInputStreamEnded = true;
}
});
}
......@@ -118,9 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
});
}
// Methods that must be called on the GL thread.
private void setupBitmap(
Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException {
......@@ -128,7 +127,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd));
maybeQueueToShaderProgram();
}
......@@ -136,7 +134,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) {
return;
}
BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek());
if (framesToQueueForCurrentBitmap == 0) {
Bitmap bitmap = currentBitmapInfo.bitmap;
......@@ -166,24 +163,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
bitmap.getWidth(),
bitmap.getHeight());
}
framesToQueueForCurrentBitmap--;
downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame(
checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) {
pendingBitmaps.remove();
if (pendingBitmaps.isEmpty() && inputEnded) {
if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
// Only signal end of stream after all pending bitmaps are processed.
// TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream();
currentInputStreamEnded = false;
}
}
}
/** Information to generate all the frames associated with a specific {@link Bitmap}. */
private static final class BitmapFrameSequenceInfo {
public final Bitmap bitmap;
......
......@@ -30,6 +30,7 @@ import android.opengl.EGLDisplay;
import android.opengl.GLES20;
import android.opengl.GLES30;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
......@@ -185,7 +186,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@InputType int inputType,
boolean renderFramesAutomatically,
Executor listenerExecutor,
Listener listener)
......@@ -224,7 +224,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
inputColorInfo,
outputColorInfo,
enableColorTransfers,
inputType,
renderFramesAutomatically,
singleThreadExecutorService,
listenerExecutor,
......@@ -251,18 +250,23 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final EGLContext eglContext;
private final InputSwitcher inputSwitcher;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// TODO(b/274109008) Use InputSwitcher to interact with texture manager.
// Owned and released by inputSwitcher.
private final TextureManager textureManager;
private final VideoFrameProcessor.Listener listener;
private final Executor listenerExecutor;
private final boolean renderFramesAutomatically;
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
// Shader programs that apply Effects.
private final ImmutableList<GlShaderProgram> effectsShaderPrograms;
// A queue of input streams that have not been fully processed identified by their input types.
@GuardedBy("lock")
private final Queue<@InputType Integer> unprocessedInputStreams;
private volatile @MonotonicNonNull CountDownLatch latch;
private final Object lock;
// CountDownLatch to wait for the current input stream to finish processing.
private volatile @MonotonicNonNull CountDownLatch latch;
// TODO(b/274109008) Use InputSwitcher to interact with texture manager.
// Owned and released by inputSwitcher.
private @MonotonicNonNull TextureManager textureManager;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
private volatile boolean hasRefreshedNextInputFrameInfo;
......@@ -271,37 +275,34 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
EGLDisplay eglDisplay,
EGLContext eglContext,
InputSwitcher inputSwitcher,
@InputType int inputType,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
VideoFrameProcessor.Listener listener,
Executor listenerExecutor,
ImmutableList<GlShaderProgram> effectsShaderPrograms,
boolean renderFramesAutomatically) {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.inputSwitcher = inputSwitcher;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.listener = listener;
this.listenerExecutor = listenerExecutor;
this.renderFramesAutomatically = renderFramesAutomatically;
this.unprocessedInputStreams = new ConcurrentLinkedQueue<>();
this.lock = new Object();
checkState(!effectsShaderPrograms.isEmpty());
checkState(getLast(effectsShaderPrograms) instanceof FinalShaderProgramWrapper);
textureManager = inputSwitcher.switchToInput(inputType);
finalShaderProgramWrapper = (FinalShaderProgramWrapper) getLast(effectsShaderPrograms);
finalShaderProgramWrapper.setOnInputStreamProcessedListener(
() -> {
@InputType int currentInputType = unprocessedInputStreams.remove();
if (latch != null) {
latch.countDown();
}
if (currentInputType == INPUT_TYPE_BITMAP) {
// Remove all pending bitmap input, because BitmapTextureManager signals end of input
// after all queued bitmaps are processed.
while (!unprocessedInputStreams.isEmpty()
&& checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_BITMAP) {
unprocessedInputStreams.remove();
synchronized (lock) {
@InputType int currentInputType = unprocessedInputStreams.remove();
if (latch != null) {
latch.countDown();
}
return inputStreamEnded && unprocessedInputStreams.isEmpty();
}
return inputStreamEnded && unprocessedInputStreams.isEmpty();
});
this.effectsShaderPrograms = effectsShaderPrograms;
}
......@@ -328,7 +329,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
* @param height The default height for input buffers, in pixels.
*/
public void setInputDefaultBufferSize(int width, int height) {
textureManager.setDefaultBufferSize(width, height);
checkNotNull(textureManager).setDefaultBufferSize(width, height);
}
@Override
......@@ -336,38 +337,47 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
checkState(
hasRefreshedNextInputFrameInfo,
"setInputFrameInfo must be called before queueing another bitmap");
textureManager.queueInputBitmap(
inputBitmap,
durationUs,
checkNotNull(nextInputFrameInfo).offsetToAddUs,
frameRate,
/* useHdr= */ false);
checkNotNull(textureManager)
.queueInputBitmap(
inputBitmap,
durationUs,
checkNotNull(nextInputFrameInfo).offsetToAddUs,
frameRate,
/* useHdr= */ false);
hasRefreshedNextInputFrameInfo = false;
}
@Override
public Surface getInputSurface() {
return textureManager.getInputSurface();
return checkNotNull(textureManager).getInputSurface();
}
@Override
public void registerInputStream(@InputType int inputType) {
if (!unprocessedInputStreams.isEmpty()) {
// Wait until the current video is processed before continuing to the next input.
if (checkNotNull(unprocessedInputStreams.peek()) == INPUT_TYPE_SURFACE) {
latch = new CountDownLatch(1);
textureManager.signalEndOfCurrentInputStream();
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
Log.e(TAG, "Error waiting for end of stream " + e);
}
} else {
textureManager.signalEndOfCurrentInputStream();
@InputType int currentInputType;
synchronized (lock) {
if (unprocessedInputStreams.isEmpty()) {
textureManager = inputSwitcher.switchToInput(inputType);
unprocessedInputStreams.add(inputType);
return;
}
currentInputType = checkNotNull(unprocessedInputStreams.peek());
}
checkNotNull(textureManager).signalEndOfCurrentInputStream();
// Wait until the current input stream is processed before continuing to the next input.
latch = new CountDownLatch(1);
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
listenerExecutor.execute(() -> listener.onError(VideoFrameProcessingException.from(e)));
}
textureManager = inputSwitcher.switchToInput(inputType);
synchronized (lock) {
unprocessedInputStreams.add(inputType);
}
unprocessedInputStreams.add(inputType);
}
@Override
......@@ -382,13 +392,13 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
checkStateNotNull(
nextInputFrameInfo, "setInputFrameInfo must be called before registering input frames");
textureManager.registerInputFrame(nextInputFrameInfo);
checkNotNull(textureManager).registerInputFrame(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = false;
}
@Override
public int getPendingInputFrameCount() {
return textureManager.getPendingFrameCount();
return checkNotNull(textureManager).getPendingFrameCount();
}
@Override
......@@ -409,8 +419,15 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
public void signalEndOfInput() {
checkState(!inputStreamEnded);
inputStreamEnded = true;
textureManager.signalEndOfCurrentInputStream();
inputSwitcher.signalEndOfInput();
boolean allInputStreamsProcessed;
synchronized (lock) {
allInputStreamsProcessed = unprocessedInputStreams.isEmpty();
}
if (allInputStreamsProcessed) {
inputSwitcher.signalEndOfInput();
} else {
checkNotNull(textureManager).signalEndOfCurrentInputStream();
}
}
@Override
......@@ -418,10 +435,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
try {
videoFrameProcessingTaskExecutor.flush();
CountDownLatch latch = new CountDownLatch(1);
textureManager.setOnFlushCompleteListener(latch::countDown);
checkNotNull(textureManager).setOnFlushCompleteListener(latch::countDown);
videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
latch.await();
textureManager.setOnFlushCompleteListener(null);
checkNotNull(textureManager).setOnFlushCompleteListener(null);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
......@@ -478,7 +495,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
boolean enableColorTransfers,
@InputType int inputType,
boolean renderFramesAutomatically,
ExecutorService singleThreadExecutorService,
Executor executor,
......@@ -535,7 +551,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
eglDisplay,
eglContext,
debugViewProvider,
/* inputColorInfo= */ linearColorInfo,
outputColorInfo,
enableColorTransfers,
renderFramesAutomatically,
......@@ -544,8 +559,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
glObjectsProvider,
textureOutputListener);
// TODO(b/274109008): Register both image and video input.
inputSwitcher.registerInput(inputType);
inputSwitcher.registerInput(INPUT_TYPE_SURFACE);
inputSwitcher.registerInput(INPUT_TYPE_BITMAP);
inputSwitcher.setDownstreamShaderProgram(effectsShaderPrograms.get(0));
setGlObjectProviderOnShaderPrograms(effectsShaderPrograms, glObjectsProvider);
......@@ -556,8 +571,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
eglDisplay,
eglContext,
inputSwitcher,
inputType,
videoFrameProcessingTaskExecutor,
listener,
executor,
effectsShaderPrograms,
renderFramesAutomatically);
}
......@@ -578,7 +594,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
EGLDisplay eglDisplay,
EGLContext eglContext,
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
boolean enableColorTransfers,
boolean renderFramesAutomatically,
......@@ -631,7 +646,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
matrixTransformationListBuilder.build(),
rgbMatrixListBuilder.build(),
debugViewProvider,
inputColorInfo,
outputColorInfo,
enableColorTransfers,
renderFramesAutomatically,
......
......@@ -119,7 +119,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
ImmutableList<GlMatrixTransformation> matrixTransformations,
ImmutableList<RgbMatrix> rgbMatrices,
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
boolean enableColorTransfers,
boolean renderFramesAutomatically,
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.net.Uri;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * End-to-end instrumentation test for {@link Transformer} for cases that cannot be tested using
 * robolectric.
 *
 * <p>This test aims at testing input of {@linkplain VideoFrameProcessor.InputType mixed types of
 * input}.
 */
@RunWith(AndroidJUnit4.class)
public class TransformerMixedInputEndToEndTest {
  // Result of the following command for MP4_ASSET_URI_STRING:
  // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
  private static final int VIDEO_FRAME_COUNT_FOR_MP4_ASSET = 30;
  private final Context context = ApplicationProvider.getApplicationContext();

  @Test
  public void videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 31;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(imageEditedMediaItem, videoEditedMediaItem)));
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 32;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(videoEditedMediaItem, imageEditedMediaItem)));
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 33;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem)));
    // 3 image items and 4 video items in the sequence above.
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(3 * imageFrameCount + 4 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 34;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem)));
    // 4 image items and 3 video items in the sequence above.
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(4 * imageFrameCount + 3 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  /**
   * Creates a {@link Transformer} with encoder fallback disabled, so frame counts are not affected
   * by fallback to a different encoder configuration.
   */
  private Transformer createTransformer() {
    return new Transformer.Builder(context)
        .setEncoderFactory(
            new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
        .build();
  }

  /** Creates an {@link EditedMediaItem} with image, with duration of one second. */
  private static EditedMediaItem createImageEditedMediaItem(String uri, int frameCount) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
        .setDurationUs(C.MICROS_PER_SECOND)
        .setFrameRate(frameCount)
        .build();
  }

  /**
   * Creates an {@link EditedMediaItem} with video, with audio removed and a {@link Presentation} of
   * specified {@code height}.
   */
  private static EditedMediaItem createVideoEditedMediaItem(String uri, int height) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(uri)))
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                ImmutableList.of(Presentation.createForHeight(height))))
        .setRemoveAudio(true)
        .build();
  }

  /** Builds a {@link Composition} of a single sequence containing {@code editedMediaItems}. */
  private static Composition buildComposition(ImmutableList<EditedMediaItem> editedMediaItems) {
    return new Composition.Builder(ImmutableList.of(new EditedMediaItemSequence(editedMediaItems)))
        .setEffects(
            new Effects(
                ImmutableList.of(),
                ImmutableList.of(
                    // To ensure that software encoders can encode.
                    Presentation.createForWidthAndHeight(
                        /* width= */ 480, /* height= */ 360, Presentation.LAYOUT_SCALE_TO_FIT))))
        .build();
  }
}
......@@ -147,9 +147,6 @@ import org.checkerframework.dataflow.qual.Pure;
if (presentation != null) {
effectsWithPresentation.add(presentation);
}
@VideoFrameProcessor.InputType
int inputType =
MimeTypes.isVideo(firstInputFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP;
try {
videoFrameProcessor =
videoFrameProcessorFactory.create(
......@@ -158,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure;
debugViewProvider,
videoFrameProcessorInputColor,
videoFrameProcessorOutputColor,
inputType,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -218,8 +214,15 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get())
.build());
videoFrameProcessor.registerInputStream(
MimeTypes.isVideo(trackFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP);
String mimeType = checkNotNull(trackFormat.sampleMimeType);
if (MimeTypes.isVideo(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
} else if (MimeTypes.isImage(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
} else {
throw new IllegalArgumentException("MIME type not supported " + mimeType);
}
}
mediaItemOffsetUs.addAndGet(durationUs);
}
......
......@@ -275,7 +275,6 @@ public final class VideoFrameProcessorTestRunner {
DebugViewProvider.NONE,
inputColorInfo,
outputColorInfo,
inputType,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -311,6 +310,7 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessingEnded = true;
}
});
videoFrameProcessor.registerInputStream(inputType);
}
public void processFirstFrameAndEnd() throws Exception {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment