Commit 43110806 by kimvde Committed by Marc Baechinger

Specify the types of samples output by AssetLoader

This is useful if an app passes a custom AssetLoader that can only
output encoded or decoded samples.

PiperOrigin-RevId: 497097492
parent 7fc2cdbe
......@@ -16,14 +16,21 @@
package androidx.media3.transformer;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.content.Context;
import android.os.Looper;
import androidx.annotation.IntDef;
import androidx.annotation.IntRange;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.UnstableApi;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Provides media data to a {@link Transformer}.
......@@ -121,7 +128,7 @@ public interface AssetLoader {
* <ul>
* <li>{@linkplain #onDurationUs(long) Report} the duration of the input media.
* <li>{@linkplain #onTrackCount(int) Report} the number of output tracks.
* <li>{@linkplain #onTrackAdded(Format, long, long) Add} the information for each track.
* <li>{@linkplain #onTrackAdded(Format, int, long, long) Add} the information for each track.
* </ul>
*
* <p>This listener can be called from any thread.
......@@ -144,6 +151,8 @@ public interface AssetLoader {
*
* @param format The {@link Format} of the input media (prior to video slow motion flattening or
* to decoding).
* @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
* this asset loader for the track added. At least one output type must be supported.
* @param streamStartPositionUs The start position of the stream (offset by {@code
* streamOffsetUs}), in microseconds.
* @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
......@@ -154,7 +163,10 @@ public interface AssetLoader {
* SamplePipeline.Input}.
*/
SamplePipeline.Input onTrackAdded(
Format format, long streamStartPositionUs, long streamOffsetUs)
Format format,
@SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws TransformationException;
/**
......@@ -164,6 +176,25 @@ public interface AssetLoader {
void onError(Exception e);
}
/**
 * Supported output types of an asset loader. Possible flag values are {@link
 * #SUPPORTED_OUTPUT_TYPE_ENCODED} and {@link #SUPPORTED_OUTPUT_TYPE_DECODED}.
 */
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef(
// flag = true: the constants are combinable bit flags, so an asset loader can declare
// support for both encoded and decoded output in a single value.
flag = true,
value = {
SUPPORTED_OUTPUT_TYPE_ENCODED,
SUPPORTED_OUTPUT_TYPE_DECODED,
})
@interface SupportedOutputTypes {}
/** Indicates that the asset loader can output encoded samples. */
int SUPPORTED_OUTPUT_TYPE_ENCODED = 1;
/** Indicates that the asset loader can output decoded samples. */
int SUPPORTED_OUTPUT_TYPE_DECODED = 1 << 1;
/** Starts the asset loader. */
void start();
......
......@@ -19,6 +19,8 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED;
import static androidx.media3.exoplayer.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import android.media.MediaCodec;
import androidx.annotation.Nullable;
......@@ -162,8 +164,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return false;
}
Format inputFormat = checkNotNull(formatHolder.format);
@AssetLoader.SupportedOutputTypes
int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
samplePipelineInput =
assetLoaderListener.onTrackAdded(inputFormat, streamStartPositionUs, streamOffsetUs);
assetLoaderListener.onTrackAdded(
inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
if (getTrackType() == C.TRACK_TYPE_VIDEO && flattenForSlowMotion) {
sefVideoSlowMotionFlattener = new SefSlowMotionFlattener(inputFormat);
}
......
......@@ -16,6 +16,9 @@
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.TransformationException.ERROR_CODE_MUXING_FAILED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static java.lang.annotation.ElementType.TYPE_USE;
......@@ -413,7 +416,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public SamplePipeline.Input onTrackAdded(
Format format, long streamStartPositionUs, long streamOffsetUs)
Format format,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws TransformationException {
if (tracksAddedCount == 0) {
// Call setTrackCount() methods here so that they are called from the same thread as the
......@@ -423,7 +429,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
SamplePipeline samplePipeline =
getSamplePipeline(format, streamStartPositionUs, streamOffsetUs);
getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
int samplePipelineIndex = tracksAddedCount;
......@@ -437,7 +443,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
.setChannelCount(2)
.build();
SamplePipeline audioSamplePipeline =
getSamplePipeline(silentAudioFormat, streamStartPositionUs, streamOffsetUs);
getSamplePipeline(
silentAudioFormat,
SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs,
streamOffsetUs);
internalHandler
.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, audioSamplePipeline)
.sendToTarget();
......@@ -469,9 +479,23 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private SamplePipeline getSamplePipeline(
Format inputFormat, long streamStartPositionUs, long streamOffsetUs)
Format inputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs)
throws TransformationException {
if (MimeTypes.isAudio(inputFormat.sampleMimeType) && shouldTranscodeAudio(inputFormat)) {
checkState(supportedOutputTypes != 0);
boolean isAudio = MimeTypes.isAudio(inputFormat.sampleMimeType);
boolean shouldTranscode =
isAudio
? shouldTranscodeAudio(inputFormat)
: shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
boolean assetLoaderNeverDecodes = (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) == 0;
checkState(!shouldTranscode || !assetLoaderNeverDecodes);
boolean assetLoaderAlwaysDecodes =
(supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) == 0;
boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
if (isAudio && shouldUseTranscodingPipeline) {
return new AudioTranscodingSamplePipeline(
inputFormat,
streamStartPositionUs,
......@@ -482,8 +506,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
encoderFactory,
muxerWrapper,
fallbackListener);
} else if (MimeTypes.isVideo(inputFormat.sampleMimeType)
&& shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs)) {
} else if (shouldUseTranscodingPipeline) {
return new VideoTranscodingSamplePipeline(
context,
inputFormat,
......
......@@ -16,6 +16,10 @@
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_DECODED;
import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
......@@ -40,8 +44,10 @@ import android.os.ParcelFileDescriptor;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.audio.SonicAudioProcessor;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
......@@ -230,7 +236,7 @@ public final class TransformerEndToEndTest {
}
@Test
public void startTransformation_concurrentTransformations_throwsError() throws Exception {
public void startTransformation_concurrentTransformations_throwsError() {
Transformer transformer = createTransformerBuilder(/* enableFallback= */ false).build();
MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_VIDEO_ONLY);
......@@ -634,6 +640,42 @@ public final class TransformerEndToEndTest {
}
@Test
public void startTransformation_withAssetLoaderAlwaysDecoding_pipelineExpectsDecoded()
throws Exception {
// Captures the SamplePipeline.Input handed back to the asset loader so the test can
// inspect it once the track has been added.
AtomicReference<SamplePipeline.Input> samplePipelineInputRef = new AtomicReference<>();
Transformer transformer =
createTransformerBuilder(/* enableFallback= */ false)
.setAssetLoaderFactory(
// Asset loader that declares decoded output only, so the pipeline it is
// given must expect decoded samples.
new FakeAssetLoader.Factory(SUPPORTED_OUTPUT_TYPE_DECODED, samplePipelineInputRef))
.build();
MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);
transformer.startTransformation(mediaItem, outputPath);
// Spin the application looper until the asset loader has added its track and
// received a pipeline input.
runLooperUntil(transformer.getApplicationLooper(), () -> samplePipelineInputRef.get() != null);
assertThat(samplePipelineInputRef.get().expectsDecodedData()).isTrue();
}
@Test
public void startTransformation_withAssetLoaderNotDecodingAndDecodingNeeded_completesWithError()
throws Exception {
Transformer transformer =
createTransformerBuilder(/* enableFallback= */ false)
// Adding an audio processor forces transcoding, which requires decoded samples.
.setAudioProcessors(ImmutableList.of(new SonicAudioProcessor()))
.setAssetLoaderFactory(
// Asset loader that can only output encoded samples; no input ref is needed
// because the transformation is expected to fail before one is produced.
new FakeAssetLoader.Factory(
SUPPORTED_OUTPUT_TYPE_ENCODED, /* samplePipelineInputRef= */ null))
.build();
MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);
transformer.startTransformation(mediaItem, outputPath);
TransformationException transformationException =
TransformerTestRunner.runUntilError(transformer);
// The decoding-needed vs. encoded-only mismatch surfaces as an IllegalStateException
// wrapped in the TransformationException reported to the listener.
assertThat(transformationException).hasCauseThat().isInstanceOf(IllegalStateException.class);
}
@Test
public void getProgress_knownDuration_returnsConsistentStates() throws Exception {
Transformer transformer = createTransformerBuilder(/* enableFallback= */ false).build();
MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_VIDEO_ONLY);
......@@ -1038,4 +1080,120 @@ public final class TransformerEndToEndTest {
}
}
}
/**
 * Test {@link AssetLoader} that reports a single fake AAC audio track with the configured
 * {@link SupportedOutputTypes}, without reading any real media.
 */
private static final class FakeAssetLoader implements AssetLoader {
/** Factory for {@link FakeAssetLoader}. All setters except {@link #setListener} are no-ops. */
public static final class Factory implements AssetLoader.Factory {
private final @SupportedOutputTypes int supportedOutputTypes;
// Optional sink for the SamplePipeline.Input returned by onTrackAdded, so tests can
// assert on it; may be null when the test does not need it.
@Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;
@Nullable private AssetLoader.Listener listener;
/**
 * Creates a factory.
 *
 * @param supportedOutputTypes The output types the created asset loaders will declare.
 * @param samplePipelineInputRef Where to store the {@link SamplePipeline.Input} received
 *     when the track is added, or null if not needed.
 */
public Factory(
@SupportedOutputTypes int supportedOutputTypes,
@Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
this.supportedOutputTypes = supportedOutputTypes;
this.samplePipelineInputRef = samplePipelineInputRef;
}
@Override
public AssetLoader.Factory setContext(Context context) {
return this;
}
@Override
public AssetLoader.Factory setMediaItem(MediaItem mediaItem) {
return this;
}
@Override
public AssetLoader.Factory setRemoveAudio(boolean removeAudio) {
return this;
}
@Override
public AssetLoader.Factory setRemoveVideo(boolean removeVideo) {
return this;
}
@Override
public AssetLoader.Factory setFlattenVideoForSlowMotion(boolean flattenVideoForSlowMotion) {
return this;
}
@Override
public AssetLoader.Factory setDecoderFactory(Codec.DecoderFactory decoderFactory) {
return this;
}
@Override
public AssetLoader.Factory setLooper(Looper looper) {
return this;
}
@Override
public AssetLoader.Factory setListener(Listener listener) {
// The only setter that is recorded; the listener is required by createAssetLoader().
this.listener = listener;
return this;
}
@Override
public AssetLoader.Factory setClock(Clock clock) {
return this;
}
@Override
public AssetLoader createAssetLoader() {
// Fails if setListener was never called before creation.
return new FakeAssetLoader(
checkNotNull(listener), supportedOutputTypes, samplePipelineInputRef);
}
}
private final AssetLoader.Listener listener;
private final @SupportedOutputTypes int supportedOutputTypes;
@Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;
public FakeAssetLoader(
Listener listener,
@SupportedOutputTypes int supportedOutputTypes,
@Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
this.listener = listener;
this.supportedOutputTypes = supportedOutputTypes;
this.samplePipelineInputRef = samplePipelineInputRef;
}
@Override
public void start() {
// Synchronously report a fixed 10-second duration and a single stereo 44.1 kHz AAC track.
listener.onDurationUs(10_000_000);
listener.onTrackCount(1);
Format format =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_AAC)
.setSampleRate(44100)
.setChannelCount(2)
.build();
try {
SamplePipeline.Input samplePipelineInput =
listener.onTrackAdded(
format,
supportedOutputTypes,
/* streamStartPositionUs= */ 0,
/* streamOffsetUs= */ 0);
if (samplePipelineInputRef != null) {
samplePipelineInputRef.set(samplePipelineInput);
}
} catch (TransformationException e) {
// start() cannot throw a checked exception, so surface the failure unchecked.
throw new IllegalStateException(e);
}
}
@Override
public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
// Stub: always returns 0 and never fills progressHolder.
return 0;
}
@Override
public void release() {}
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment