Commit a43ef109 by claincly Committed by Tofunmi Adigun-Hameed

Enable switching input types via InputMultiplexer.

PiperOrigin-RevId: 529624205
parent 5dbbff49
......@@ -77,13 +77,12 @@ public interface VideoFrameProcessor {
* @param debugViewProvider A {@link DebugViewProvider}.
* @param inputColorInfo The {@link ColorInfo} for input frames.
* @param outputColorInfo The {@link ColorInfo} for output frames.
* @param inputType The {@link InputType}.
* @param renderFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to
* render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}.
* @return A new instance.
* @throws VideoFrameProcessingException If a problem occurs while creating the {@link
......@@ -95,9 +94,8 @@ public interface VideoFrameProcessor {
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@InputType int inputType,
boolean renderFramesAutomatically,
Executor executor,
Executor listenerExecutor,
Listener listener)
throws VideoFrameProcessingException;
}
......@@ -153,15 +151,14 @@ public interface VideoFrameProcessor {
/**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_BITMAP}.
*
* <p>Can be called on any thread.
*
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_BITMAP bitmap input}.
*/
// TODO(b/262693274): Remove duration and frameRate parameters when EditedMediaItem can be
// signalled down to the processors.
......@@ -171,10 +168,10 @@ public interface VideoFrameProcessor {
* Returns the input {@link Surface}, from which the {@link VideoFrameProcessor} consumes input
* frames.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/
Surface getInputSurface();
......@@ -206,11 +203,10 @@ public interface VideoFrameProcessor {
*
* <p>Must be called before rendering a frame to the input surface.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>Can be called on any thread.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
* @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
* #setInputFrameInfo(FrameInfo)}.
*/
......@@ -278,10 +274,10 @@ public interface VideoFrameProcessor {
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns.
*
* <p>This method must only be called when the {@link VideoFrameProcessor} is {@linkplain
* Factory#create created} with {@link #INPUT_TYPE_SURFACE}.
*
* <p>{@link Listener} methods invoked prior to calling this method should be ignored.
*
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_SURFACE surface input}.
*/
void flush();
......
......@@ -22,7 +22,6 @@ import static com.google.android.exoplayer2.decoder.DecoderReuseEvaluation.REUSE
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static com.google.android.exoplayer2.util.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static java.lang.Math.max;
import static java.lang.Math.min;
......@@ -2022,9 +2021,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
DebugViewProvider.NONE,
inputAndOutputColorInfos.first,
inputAndOutputColorInfos.second,
INPUT_TYPE_SURFACE,
/* renderFramesAutomatically= */ false,
/* executor= */ handler::post,
/* listenerExecutor= */ handler::post,
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
......@@ -2075,6 +2073,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
throw new IllegalStateException();
}
});
videoFrameProcessor.registerInputStream(VideoFrameProcessor.INPUT_TYPE_SURFACE);
this.initialStreamOffsetUs = initialStreamOffsetUs;
} catch (Exception e) {
throw renderer.createRendererException(
......
......@@ -301,7 +301,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
DebugViewProvider.NONE,
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
INPUT_TYPE_SURFACE,
renderFramesAutomatically,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -341,7 +340,6 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
videoFrameProcessingEndedCountDownLatch.countDown();
}
}));
defaultVideoFrameProcessor
.getTaskExecutor()
.submit(
......
......@@ -41,14 +41,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs;
private boolean useHdr;
private boolean inputEnded;
private boolean currentInputStreamEnded;
/**
* Creates a new instance.
*
......@@ -78,7 +76,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr) {
videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr));
() -> {
setupBitmap(inputBitmap, durationUs, offsetUs, frameRate, useHdr);
currentInputStreamEnded = false;
});
}
@Override
......@@ -89,7 +90,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void signalEndOfCurrentInputStream() {
// Delegates to signalEndOfInput(); the end of stream is propagated to the downstream shader
// program once all pending bitmaps have been queued (see maybeQueueToShaderProgram()).
signalEndOfInput();
}
@Override
......@@ -99,7 +100,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
shaderProgram.signalEndOfCurrentInputStream();
} else {
inputEnded = true;
currentInputStreamEnded = true;
}
});
}
......@@ -118,9 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
});
}
// Methods that must be called on the GL thread.
private void setupBitmap(
Bitmap bitmap, long durationUs, long offsetUs, float frameRate, boolean useHdr)
throws VideoFrameProcessingException {
......@@ -128,7 +127,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, offsetUs, frameDurationUs, framesToAdd));
maybeQueueToShaderProgram();
}
......@@ -136,7 +134,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (pendingBitmaps.isEmpty() || downstreamShaderProgramCapacity == 0) {
return;
}
BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek());
if (framesToQueueForCurrentBitmap == 0) {
Bitmap bitmap = currentBitmapInfo.bitmap;
......@@ -166,24 +163,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
bitmap.getWidth(),
bitmap.getHeight());
}
framesToQueueForCurrentBitmap--;
downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame(
checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) {
pendingBitmaps.remove();
if (pendingBitmaps.isEmpty() && inputEnded) {
if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
// Only signal end of stream after all pending bitmaps are processed.
// TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream();
currentInputStreamEnded = false;
}
}
}
/** Information to generate all the frames associated with a specific {@link Bitmap}. */
private static final class BitmapFrameSequenceInfo {
public final Bitmap bitmap;
......
......@@ -119,7 +119,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
ImmutableList<GlMatrixTransformation> matrixTransformations,
ImmutableList<RgbMatrix> rgbMatrices,
DebugViewProvider debugViewProvider,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
boolean enableColorTransfers,
boolean renderFramesAutomatically,
......
/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static com.google.common.truth.Truth.assertThat;

import android.content.Context;
import android.net.Uri;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * End-to-end instrumentation test for {@link Transformer} for cases that cannot be tested using
 * robolectric.
 *
 * <p>This test aims at testing input of {@linkplain VideoFrameProcessor.InputType mixed types of
 * input}.
 */
@RunWith(AndroidJUnit4.class)
public class TransformerMixedInputEndToEndTest {

  // Result of the following command for MP4_ASSET_URI_STRING:
  // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
  private static final int VIDEO_FRAME_COUNT_FOR_MP4_ASSET = 30;

  private final Context context = ApplicationProvider.getApplicationContext();

  @Test
  public void videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withImageThenVideoInputs_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 31;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(imageEditedMediaItem, videoEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount()
      throws Exception {
    String testId = "videoEditing_withVideoThenImageInputs_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 32;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(ImmutableList.of(videoEditedMediaItem, imageEditedMediaItem)));

    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(imageFrameCount + VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithVideo_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 33;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 360);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem)));

    // 3 image items and 4 video items in the sequence above.
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(3 * imageFrameCount + 4 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  @Test
  public void
      videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount()
          throws Exception {
    String testId =
        "videoEditing_withComplexVideoAndImageInputsEndWithImage_completesWithCorrectFrameCount";
    Transformer transformer = createTransformer();
    int imageFrameCount = 34;
    EditedMediaItem imageEditedMediaItem =
        createImageEditedMediaItem(PNG_ASSET_URI_STRING, /* frameCount= */ imageFrameCount);
    EditedMediaItem videoEditedMediaItem =
        createVideoEditedMediaItem(MP4_ASSET_URI_STRING, /* height= */ 480);

    ExportTestResult result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(
                testId,
                buildComposition(
                    ImmutableList.of(
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem,
                        imageEditedMediaItem,
                        videoEditedMediaItem,
                        imageEditedMediaItem)));

    // 4 image items and 3 video items in the sequence above.
    assertThat(result.exportResult.videoFrameCount)
        .isEqualTo(4 * imageFrameCount + 3 * VIDEO_FRAME_COUNT_FOR_MP4_ASSET);
  }

  /** Creates a {@link Transformer} with encoder fallback disabled, to pin the output format. */
  private Transformer createTransformer() {
    return new Transformer.Builder(context)
        .setEncoderFactory(
            new DefaultEncoderFactory.Builder(context).setEnableFallback(false).build())
        .build();
  }

  /** Creates an {@link EditedMediaItem} with image, with duration of one second. */
  private static EditedMediaItem createImageEditedMediaItem(String uri, int frameCount) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(uri))
        .setDurationUs(C.MICROS_PER_SECOND)
        .setFrameRate(frameCount)
        .build();
  }

  /**
   * Creates an {@link EditedMediaItem} with video, with audio removed and a {@link Presentation} of
   * specified {@code height}.
   */
  private static EditedMediaItem createVideoEditedMediaItem(String uri, int height) {
    return new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(uri)))
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                ImmutableList.of(Presentation.createForHeight(height))))
        .setRemoveAudio(true)
        .build();
  }

  /** Builds a single-sequence {@link Composition} that downscales output for software encoders. */
  private static Composition buildComposition(ImmutableList<EditedMediaItem> editedMediaItems) {
    return new Composition.Builder(ImmutableList.of(new EditedMediaItemSequence(editedMediaItems)))
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                ImmutableList.of(
                    // To ensure that software encoders can encode.
                    Presentation.createForWidthAndHeight(
                        /* width= */ 480, /* height= */ 360, Presentation.LAYOUT_SCALE_TO_FIT))))
        .build();
  }
}
......@@ -147,9 +147,6 @@ import org.checkerframework.dataflow.qual.Pure;
if (presentation != null) {
effectsWithPresentation.add(presentation);
}
@VideoFrameProcessor.InputType
int inputType =
MimeTypes.isVideo(firstInputFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP;
try {
videoFrameProcessor =
videoFrameProcessorFactory.create(
......@@ -158,7 +155,6 @@ import org.checkerframework.dataflow.qual.Pure;
debugViewProvider,
videoFrameProcessorInputColor,
videoFrameProcessorOutputColor,
inputType,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -218,8 +214,15 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get())
.build());
videoFrameProcessor.registerInputStream(
MimeTypes.isVideo(trackFormat.sampleMimeType) ? INPUT_TYPE_SURFACE : INPUT_TYPE_BITMAP);
String mimeType = checkNotNull(trackFormat.sampleMimeType);
if (MimeTypes.isVideo(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
} else if (MimeTypes.isImage(mimeType)) {
videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
} else {
throw new IllegalArgumentException("MIME type not supported " + mimeType);
}
}
mediaItemOffsetUs.addAndGet(durationUs);
}
......
......@@ -275,7 +275,6 @@ public final class VideoFrameProcessorTestRunner {
DebugViewProvider.NONE,
inputColorInfo,
outputColorInfo,
inputType,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
......@@ -311,6 +310,7 @@ public final class VideoFrameProcessorTestRunner {
videoFrameProcessingEnded = true;
}
});
videoFrameProcessor.registerInputStream(inputType);
}
public void processFirstFrameAndEnd() throws Exception {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment