Commit a43ef109 by claincly Committed by Tofunmi Adigun-Hameed

Enable switching input types via InputMultiplexer.

PiperOrigin-RevId: 529624205
parent 5dbbff49
...@@ -77,13 +77,12 @@ public interface VideoFrameProcessor { ...@@ -77,13 +77,12 @@ public interface VideoFrameProcessor {
* @param debugViewProvider A {@link DebugViewProvider}. * @param debugViewProvider A {@link DebugViewProvider}.
* @param inputColorInfo The {@link ColorInfo} for input frames. * @param inputColorInfo The {@link ColorInfo} for input frames.
* @param outputColorInfo The {@link ColorInfo} for output frames. * @param outputColorInfo The {@link ColorInfo} for output frames.
* @param inputType The {@link InputType}.
* @param renderFramesAutomatically If {@code true}, the instance will render output frames to * @param renderFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as * the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link * {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to * VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to
* render or drop the frame. * render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked. * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}. * @param listener A {@link Listener}.
* @return A new instance. * @return A new instance.
* @throws VideoFrameProcessingException If a problem occurs while creating the {@link * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
...@@ -95,9 +94,8 @@ public interface VideoFrameProcessor { ...@@ -95,9 +94,8 @@ public interface VideoFrameProcessor {
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
@InputType int inputType,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
Executor executor, Executor listenerExecutor,
Listener listener) Listener listener)
throws VideoFrameProcessingException; throws VideoFrameProcessingException;
} }
...@@ -153,15 +151,14 @@ public interface VideoFrameProcessor { ...@@ -153,15 +151,14 @@ public interface VideoFrameProcessor {
/** /**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}. * Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
* *
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_BITMAP}.
*
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}. * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds. * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second. * second.
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_BITMAP bitmap input}.
*/ */
// TODO(b/262693274): Remove duration and frameRate parameters when EditedMediaItem can be // TODO(b/262693274): Remove duration and frameRate parameters when EditedMediaItem can be
// signalled down to the processors. // signalled down to the processors.
...@@ -171,10 +168,10 @@ public interface VideoFrameProcessor { ...@@ -171,10 +168,10 @@ public interface VideoFrameProcessor {
* Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames * Returns the input {@link Surface}, where {@link VideoFrameProcessor} consumes input frames
* from. * from.
* *
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread. * <p>Can be called on any thread.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/ */
Surface getInputSurface(); Surface getInputSurface();
...@@ -206,11 +203,10 @@ public interface VideoFrameProcessor { ...@@ -206,11 +203,10 @@ public interface VideoFrameProcessor {
* *
* <p>Must be called before rendering a frame to the input surface. * <p>Must be called before rendering a frame to the input surface.
* *
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
* @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
* #setInputFrameInfo(FrameInfo)}. * #setInputFrameInfo(FrameInfo)}.
*/ */
...@@ -278,10 +274,10 @@ public interface VideoFrameProcessor { ...@@ -278,10 +274,10 @@ public interface VideoFrameProcessor {
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this * <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns. * method are no longer considered to be registered when this method returns.
* *
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>{@link Listener} methods invoked prior to calling this method should be ignored. * <p>{@link Listener} methods invoked prior to calling this method should be ignored.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/ */
void flush(); void flush();
......
...@@ -22,7 +22,6 @@ import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE ...@@ -22,7 +22,6 @@ import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static java.lang.Math.max; import static java.lang.Math.max;
import static java.lang.Math.min; import static java.lang.Math.min;
...@@ -2022,9 +2021,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2022,9 +2021,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
DebugViewProvider.NONE, DebugViewProvider.NONE,
inputAndOutputColorInfos.first, inputAndOutputColorInfos.first,
inputAndOutputColorInfos.second, inputAndOutputColorInfos.second,
INPUT_TYPE_SURFACE,
/* renderFramesAutomatically= */ false, /* renderFramesAutomatically= */ false,
/* executor= */ handler::post, /* listenerExecutor= */ handler::post,
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
...@@ -2075,6 +2073,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -2075,6 +2073,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException(); throw new IllegalStateException();
} }
}); });
videoFrameProcessor.registerInputStream(VideoFrameProcessor.INPUT_TYPE_SURFACE);
this.initialStreamOffsetUs = initialStreamOffsetUs; this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) { } catch (Exception e) {
throw renderer.createRendererException( throw renderer.createRendererException(
......
...@@ -301,7 +301,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest { ...@@ -301,7 +301,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
DebugViewProvider.NONE, DebugViewProvider.NONE,
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED, /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
INPUT_TYPE_SURFACE,
renderFramesAutomatically, renderFramesAutomatically,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
...@@ -341,7 +340,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest { ...@@ -341,7 +340,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
videoFrameProcessingEndedCountDownLatch.countDown(); videoFrameProcessingEndedCountDownLatch.countDown();
} }
})); }));
defaultVideoFrameProcessor defaultVideoFrameProcessor
.getTaskExecutor() .getTaskExecutor()
.submit( .submit(
......
...@@ -41,14 +41,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -41,14 +41,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream. // The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps; private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
private @MonotonicNonNull GlTextureInfo currentGlTextureInfo; private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
private int downstreamShaderProgramCapacity; private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap; private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs; private double currentPresentationTimeUs;
private boolean useHdr; private boolean useHdr;
private boolean inputEnded; private boolean currentInputStreamEnded;
/** /**
* Creates a new instance. * Creates a new instance.
* *
...@@ -78,7 +76,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -78,7 +76,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) { Bitmap inputBitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr)); () -> {
setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr);
currentInputStreamEnded = false;
});
} }
@Override @Override
...@@ -89,7 +90,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -89,7 +90,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
// Do nothing here. End of current input signaling is handled in maybeQueueToShaderProgram(). signalEndOfInput();
} }
@Override @Override
...@@ -99,7 +100,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -99,7 +100,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) { if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
shaderProgram.signalEndOfCurrentInputStream(); shaderProgram.signalEndOfCurrentInputStream();
} else { } else {
inputEnded = true; currentInputStreamEnded = true;
} }
}); });
} }
...@@ -118,9 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -118,9 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
}); });
} }
// Methods that must be called on the GL thread. // Methods that must be called on the GL thread.
private void setupBitmap( private void setupBitmap(
Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
...@@ -128,7 +127,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -128,7 +127,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND)); int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
double frameDurationUs = C.MICROS_PER_SECOND / frameRate; double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd)); pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd));
maybeQueueToShaderProgram(); maybeQueueToShaderProgram();
} }
...@@ -136,7 +134,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -136,7 +134,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) { if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) {
return; return;
} }
BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek()); BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek());
if (framesToQueueForCurrentBitmap == 0) { if (framesToQueueForCurrentBitmap == 0) {
Bitmap bitmap = currentBitmapInfo.bitmap; Bitmap bitmap = currentBitmapInfo.bitmap;
...@@ -166,24 +163,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -166,24 +163,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
bitmap.getWidth(), bitmap.getWidth(),
bitmap.getHeight()); bitmap.getHeight());
} }
framesToQueueForCurrentBitmap--; framesToQueueForCurrentBitmap--;
downstreamShaderProgramCapacity--; downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame( shaderProgram.queueInputFrame(
checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs)); checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs; currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) { if (framesToQueueForCurrentBitmap == 0) {
pendingBitmaps.remove(); pendingBitmaps.remove();
if (pendingBitmaps.isEmpty() && inputEnded) { if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
// Only signal end of stream after all pending bitmaps are processed. // Only signal end of stream after all pending bitmaps are processed.
// TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap // TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream(); shaderProgram.signalEndOfCurrentInputStream();
currentInputStreamEnded = false;
} }
} }
} }
/** Information to generate all the frames associated with a specific {@link Bitmap}. */ /** Information to generate all the frames associated with a specific {@link Bitmap}. */
private static final class BitmapFrameSequenceInfo { private static final class BitmapFrameSequenceInfo {
public final Bitmap bitmap; public final Bitmap bitmap;
......
...@@ -119,7 +119,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -119,7 +119,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
ImmutableList<GlMatrixTransformation> matrixTransformations, ImmutableList<GlMatrixTransformation> matrixTransformations,
ImmutableList<RgbMatrix> rgbMatrices, ImmutableList<RgbMatrix> rgbMatrices,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
boolean enableColorTransfers, boolean enableColorTransfers,
boolean renderFramesAutomatically, boolean renderFramesAutomatically,
......
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.net.Uri;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * End-to-end instrumentation test for {@link Transformer} for cases that cannot be tested using
 * robolectric.
 *
 * <p>This test aims at testing input of {@linkplain VideoFrameProcessor.InputType mixed types of
 * input} (images and videos) within a single export, verifying that the output contains the
 * expected total number of video frames.
 */
@RunWith(AndroidJUnit4.class)
public class TransformerMixedInputEndToEndTest {

  // Result of the following command for MP4_ASSET_URI_STRING:
  // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
  private static final int VIDEO_FRAME_COUNT_FOR_MP4_ASSET = 30;

  private final Context context = ApplicationProvider.getApplicationContext();

  @Test
  public void videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount";
    int imageFrameCount = 31;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, createTransformer())
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(imageEditedMediaItem, videoEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount";
    int imageFrameCount = 32;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, createTransformer())
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(videoEditedMediaItem, imageEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount";
    int imageFrameCount = 33;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);

    // 4 video items and 3 image items, interleaved, ending with video.
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, createTransformer())
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(3 * imageFrameCount + 4 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount";
    int imageFrameCount = 34;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);

    // 3 video items and 4 image items, interleaved, ending with image.
    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, createTransformer())
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(4 * imageFrameCount + 3 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  /**
   * Creates a {@link Transformer} with encoder fallback disabled, so that tests fail rather than
   * silently changing the encoding parameters.
   */
  private Transformer createTransformer() {
    return new Transformer.Builder(context)
        .setEncoderFactory(
            new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
        .build();
  }

  /**
   * Creates an {@link EditedMediaItem} with image, with duration of one second.
   *
   * <p>The frame rate equals {@code frameCount} because the duration is fixed at one second.
   */
  private static EditedMediaItem createImageEditedMediaItem(String uri, int frameCount) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(uri)))
        .setDurationUs(C.MICROS_PER_SECOND)
        .setFrameRate(frameCount)
        .build();
  }

  /**
   * Creates an {@link EditedMediaItem} with video, with audio removed and a {@link Presentation} of
   * specified {@code height}.
   */
  private static EditedMediaItem createVideoEditedMediaItem(String uri, int height) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(uri)))
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                ImmutableList.of(Presentation.createForHeight(height))))
        .setRemoveAudio(true)
        .build();
  }

  /**
   * Wraps the given items in a single-sequence {@link Composition}, scaling the output to a fixed
   * size so that software encoders can encode it.
   */
  private static Composition buildComposition(ImmutableList<EditedMediaItem> editedMediaItems) {
    return new Composition.Builder(ImmutableList.of(new EditedMediaItemSequence(editedMediaItems)))
        .setEffects(
            new Effects(
                ImmutableList.of(),
                ImmutableList.of(
                    // To ensure that software encoders can encode.
                    Presentation.createForWidthAndHeight(
                        /* width= */ 480, /* height= */ 360, Presentation.LAYOUT_SCALE_TO_FIT))))
        .build();
  }
}
...@@ -147,9 +147,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -147,9 +147,6 @@ import org.checkerframework.dataflow.qual.Pure;
if (presentation != null) { if (presentation != null) {
effectsWithPresentation.add(presentation); effectsWithPresentation.add(presentation);
} }
@VideoFrameProcessor.InputType
int inputType =
MimeTypes.isVideo(firstInputFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP;
try { try {
videoFrameProcessor = videoFrameProcessor =
videoFrameProcessorFactory.create( videoFrameProcessorFactory.create(
...@@ -158,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -158,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure;
debugViewProvider, debugViewProvider,
videoFrameProcessorInputColor, videoFrameProcessorInputColor,
videoFrameProcessorOutputColor, videoFrameProcessorOutputColor,
inputType,
/* renderFramesAutomatically= */ true, /* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
...@@ -218,8 +214,15 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -218,8 +214,15 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get()) .setOffsetToAddUs(mediaItemOffsetUs.get())
.build()); .build());
videoFrameProcessor.registerInputStream(
MimeTypes.isVideo(trackFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP); String mimeType = checkNotNull(trackFormat.sampleMimeType);
if (MimeTypes.isVideo(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
} else if (MimeTypes.isImage(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
} else {
throw new IllegalArgumentException("MIME type not supported " + mimeType);
}
} }
mediaItemOffsetUs.addAndGet(durationUs); mediaItemOffsetUs.addAndGet(durationUs);
} }
......
...@@ -275,7 +275,6 @@ public final class VideoFrameProcessorTestRunner { ...@@ -275,7 +275,6 @@ public final class VideoFrameProcessorTestRunner {
DebugViewProvider.NONE, DebugViewProvider.NONE,
inputColorInfo, inputColorInfo,
outputColorInfo, outputColorInfo,
inputType,
/* renderFramesAutomatically= */ true, /* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
...@@ -311,6 +310,7 @@ public final class VideoFrameProcessorTestRunner { ...@@ -311,6 +310,7 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessingEnded = true; videoFrameProcessingEnded = true;
} }
}); });
videoFrameProcessor.registerInputStream(inputType);
} }
public void processFirstFrameAndEnd() throws Exception { public void processFirstFrameAndEnd() throws Exception {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment